From d5a71313e099dc4e336d3b198eaea60ac9e91075 Mon Sep 17 00:00:00 2001 From: Adrien Piquerez Date: Tue, 23 May 2023 10:27:57 +0200 Subject: [PATCH 1/5] Disable rewrites in OutlineParser --- compiler/src/dotty/tools/dotc/parsing/Parsers.scala | 6 +++--- compiler/src/dotty/tools/dotc/parsing/Scanners.scala | 4 ++-- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala index 3079b26df6cd..f6c0474dafb2 100644 --- a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala +++ b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala @@ -169,9 +169,9 @@ object Parsers { } } - class Parser(source: SourceFile)(using Context) extends ParserCommon(source) { + class Parser(source: SourceFile, allowRewrite: Boolean = true)(using Context) extends ParserCommon(source) { - val in: Scanner = new Scanner(source, profile = Profile.current) + val in: Scanner = new Scanner(source, profile = Profile.current, allowRewrite = allowRewrite) // in.debugTokenStream = true // uncomment to see the token stream of the standard scanner, but not syntax highlighting /** This is the general parse entry point. @@ -4361,7 +4361,7 @@ object Parsers { /** OutlineParser parses top-level declarations in `source` to find declared classes, ignoring their bodies (which * must only have balanced braces). This is used to map class names to defining sources. 
*/ - class OutlineParser(source: SourceFile)(using Context) extends Parser(source) with OutlineParserCommon { + class OutlineParser(source: SourceFile)(using Context) extends Parser(source, allowRewrite = false) with OutlineParserCommon { def skipBracesHook(): Option[Tree] = if (in.token == XMLSTART) Some(xmlLiteral()) else None diff --git a/compiler/src/dotty/tools/dotc/parsing/Scanners.scala b/compiler/src/dotty/tools/dotc/parsing/Scanners.scala index fac73bfb4992..e29b858e0978 100644 --- a/compiler/src/dotty/tools/dotc/parsing/Scanners.scala +++ b/compiler/src/dotty/tools/dotc/parsing/Scanners.scala @@ -170,7 +170,7 @@ object Scanners { errorButContinue(em"trailing separator is not allowed", offset + litBuf.length - 1) } - class Scanner(source: SourceFile, override val startFrom: Offset = 0, profile: Profile = NoProfile, allowIndent: Boolean = true)(using Context) extends ScannerCommon(source) { + class Scanner(source: SourceFile, override val startFrom: Offset = 0, profile: Profile = NoProfile, allowRewrite: Boolean = true, allowIndent: Boolean = true)(using Context) extends ScannerCommon(source) { val keepComments = !ctx.settings.YdropComments.value /** A switch whether operators at the start of lines can be infix operators */ @@ -179,7 +179,7 @@ object Scanners { var debugTokenStream = false val showLookAheadOnDebug = false - val rewrite = ctx.settings.rewrite.value.isDefined + val rewrite = allowRewrite && ctx.settings.rewrite.value.isDefined val oldSyntax = ctx.settings.oldSyntax.value val newSyntax = ctx.settings.newSyntax.value From b949156ca12a7096b784e55a3829da89c8f0346c Mon Sep 17 00:00:00 2001 From: Adrien Piquerez Date: Tue, 16 May 2023 16:17:33 +0200 Subject: [PATCH 2/5] Patch indentation with -indent -rewrite Ensure indentation is correct when removing braces. If the first indentation of the region is greater than the indentation of the enclosing region, we use it to indent the whole region. 
Otherwise we use the incremented indentation of the enclosing region. ```scala def foo = { x // we replicate indentation of x downward in region y } ``` ```scala def foo = { x // indentation of x is incorrect, we increment enclosing indentation y } ``` A bigger indentation than the required one is permitted except just after a closing brace. ```scala def bar = { x .toString // permitted indentation def foo = { } bar // must be unindented, to not fall into the body of foo } ``` --- .../dotty/tools/dotc/parsing/Parsers.scala | 615 ++++++++++-------- .../dotty/tools/dotc/parsing/Scanners.scala | 61 +- .../dotc/parsing/xml/MarkupParsers.scala | 5 +- .../dotty/tools/dotc/rewrites/Rewrites.scala | 14 +- .../dotty/tools/dotc/CompilationTests.scala | 11 +- .../tools/vulpix/TestConfiguration.scala | 1 + tests/pos/indent-colons.scala | 5 + tests/rewrites/indent-3-spaces.check | 21 + tests/rewrites/indent-3-spaces.scala | 26 + tests/rewrites/indent-comments.check | 25 + tests/rewrites/indent-comments.scala | 27 + tests/rewrites/indent-mix-brace.check | 17 + tests/rewrites/indent-mix-brace.scala | 21 + tests/rewrites/indent-mix-tab-space.check | 22 + tests/rewrites/indent-mix-tab-space.scala | 27 + tests/rewrites/indent-rewrite.check | 233 +++++++ tests/rewrites/indent-rewrite.scala | 265 ++++++++ 17 files changed, 1097 insertions(+), 299 deletions(-) create mode 100644 tests/rewrites/indent-3-spaces.check create mode 100644 tests/rewrites/indent-3-spaces.scala create mode 100644 tests/rewrites/indent-comments.check create mode 100644 tests/rewrites/indent-comments.scala create mode 100644 tests/rewrites/indent-mix-brace.check create mode 100644 tests/rewrites/indent-mix-brace.scala create mode 100644 tests/rewrites/indent-mix-tab-space.check create mode 100644 tests/rewrites/indent-mix-tab-space.scala create mode 100644 tests/rewrites/indent-rewrite.check create mode 100644 tests/rewrites/indent-rewrite.scala diff --git a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala 
b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala index f6c0474dafb2..d286ad384ac9 100644 --- a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala +++ b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala @@ -27,7 +27,7 @@ import ScriptParsers._ import Decorators._ import util.Chars import scala.annotation.tailrec -import rewrites.Rewrites.{patch, overlapsPatch} +import rewrites.Rewrites.{patch, patchOver, overlapsPatch} import reporting._ import config.Feature import config.Feature.{sourceVersion, migrateTo3, globalOnlyImports} @@ -297,7 +297,7 @@ object Parsers { val offset = in.offset if in.token != token then syntaxErrorOrIncomplete(ExpectedTokenButFound(token, in.token)) - if in.token == token then in.nextToken() + if in.token == token then nextToken() offset def accept(name: Name): Int = { @@ -305,20 +305,20 @@ object Parsers { if !isIdent(name) then syntaxErrorOrIncomplete(em"`$name` expected") if isIdent(name) then - in.nextToken() + nextToken() offset } def acceptColon(): Int = val offset = in.offset - if in.isColon then { in.nextToken(); offset } + if in.isColon then { nextToken(); offset } else accept(COLONop) /** semi = nl {nl} | `;' * nl = `\n' // where allowed */ def acceptStatSep(): Unit = - if in.isNewLine then in.nextToken() else accept(SEMI) + if in.isNewLine then nextToken() else accept(SEMI) /** Parse statement separators and end markers. Ensure that there is at least * one statement separator unless the next token terminates a statement´sequence. 
@@ -333,7 +333,7 @@ object Parsers { def statSepOrEnd[T <: Tree](stats: ListBuffer[T], noPrevStat: Boolean = false, what: String = "statement", altEnd: Token = EOF): Boolean = def recur(sepSeen: Boolean, endSeen: Boolean): Boolean = if isStatSep then - in.nextToken() + nextToken() recur(true, endSeen) else if in.token == END then if endSeen then syntaxError(em"duplicate end marker") @@ -352,7 +352,7 @@ object Parsers { if mustStartStatTokens.contains(found) then false // it's a statement that might be legal in an outer context else - in.nextToken() // needed to ensure progress; otherwise we might cycle forever + nextToken() // needed to ensure progress; otherwise we might cycle forever skip() true @@ -559,18 +559,21 @@ object Parsers { def inBraces[T](body: => T): T = enclosed(LBRACE, body) def inBrackets[T](body: => T): T = enclosed(LBRACKET, body) - def inBracesOrIndented[T](body: => T, rewriteWithColon: Boolean = false): T = + def inBracesOrIndented[T](body: => T, inStatSeq: Boolean = false, rewriteWithColon: Boolean = false): T = + val followsArrow = in.last.token == ARROW if in.token == INDENT then - val rewriteToBraces = in.rewriteNoIndent - && !testChars(in.lastOffset - 3, " =>") // braces are always optional after `=>` so none should be inserted + // braces are always optional after `=>` so none should be inserted + val rewriteToBraces = in.rewriteNoIndent && !followsArrow + val rewriteToIndent = in.rewriteToIndent && !followsArrow if rewriteToBraces then indentedToBraces(body) + else if rewriteToIndent then enclosed(INDENT, toIndentedRegion(body)) else enclosed(INDENT, body) else - if in.rewriteToIndent then bracesToIndented(body, rewriteWithColon) + if in.rewriteToIndent then bracesToIndented(body, inStatSeq, rewriteWithColon) else inBraces(body) - def inDefScopeBraces[T](body: => T, rewriteWithColon: Boolean = false): T = - inBracesOrIndented(body, rewriteWithColon) + def inDefScopeBraces[T](body: => T, inStatSeq: Boolean = false, rewriteWithColon: 
Boolean = false): T = + inBracesOrIndented(body, inStatSeq, rewriteWithColon) /** {`,` } */ def commaSeparated[T](part: () => T): List[T] = @@ -586,7 +589,7 @@ object Parsers { if in.token == COMMA then val ts = new ListBuffer[T] += leading while in.token == COMMA do - in.nextToken() + nextToken() ts += part() ts.toList else leading :: Nil @@ -638,27 +641,32 @@ object Parsers { /* -------- REWRITES ----------------------------------------------------------- */ - /** The last offset where a colon at the end of line would be required if a subsequent { ... } - * block would be converted to an indentation region. - */ - var possibleColonOffset: Int = -1 + object IndentRewriteState: + /** The last offset where a colon at the end of line would be required if a subsequent { ... } + * block would be converted to an indentation region. */ + var possibleColonOffset: Int = -1 + + /** When rewriting to indent, the minimum indent width to rewrite to */ + var minimumIndent: IndentWidth = IndentWidth.Zero - def testChar(idx: Int, p: Char => Boolean): Boolean = { + /** When rewritting to indent, the maximum indent width to rewrite to + * to ensure an indent region is properly closed by outdentation */ + var maximumIndent: Option[IndentWidth] = None + + def testChar(idx: Int, p: Char => Boolean): Boolean = val txt = source.content - idx < txt.length && p(txt(idx)) - } + idx > -1 && idx < txt.length && p(txt(idx)) - def testChar(idx: Int, c: Char): Boolean = { + def testChar(idx: Int, c: Char): Boolean = val txt = source.content - idx < txt.length && txt(idx) == c - } + idx > -1 && idx < txt.length && txt(idx) == c def testChars(from: Int, str: String): Boolean = str.isEmpty || testChar(from, str.head) && testChars(from + 1, str.tail) def skipBlanks(idx: Int, step: Int = 1): Int = - if (testChar(idx, c => c == ' ' || c == '\t' || c == Chars.CR)) skipBlanks(idx + step, step) + if testChar(idx, c => c == ' ' || c == '\t' || c == Chars.CR) then skipBlanks(idx + step, step) else idx /** 
Parse indentation region `body` and rewrite it to be in braces instead */ @@ -723,30 +731,17 @@ object Parsers { t end indentedToBraces - /** The region to eliminate when replacing an opening `(` or `{` that ends a line. - * The `(` or `{` is at in.offset. - */ - def startingElimRegion(colonRequired: Boolean): (Offset, Offset) = { - val skipped = skipBlanks(in.offset + 1) - if (in.isAfterLineEnd) - if (testChar(skipped, Chars.LF) && !colonRequired) - (in.lineOffset, skipped + 1) // skip the whole line - else - (in.offset, skipped) - else if (testChar(in.offset - 1, ' ')) (in.offset - 1, in.offset + 1) - else (in.offset, in.offset + 1) - } + /** The region to eliminate when replacing a brace or parenthesis that ends a line */ + def elimRegion(offset: Offset): (Offset, Offset) = + val (start, end) = blankLinesAround(offset, offset + 1) + if testChar(end, Chars.LF) then + if testChar(start - 1, Chars.LF) then (start, end + 1) // skip the whole line + else (start, end) // skip the end of line + else (offset, end) // skip from last to end of token - /** The region to eliminate when replacing a closing `)` or `}` that starts a new line - * The `)` or `}` precedes in.lastOffset. - */ - def closingElimRegion(): (Offset, Offset) = { - val skipped = skipBlanks(in.lastOffset) - if (testChar(skipped, Chars.LF)) // if `)` or `}` is on a line by itself - (source.startOfLine(in.lastOffset), skipped + 1) // skip the whole line - else // else - (in.lastOffset - 1, skipped) // move the following text up to where the `)` or `}` was - } + /** Expand the current span to its surrounding blank space */ + def blankLinesAround(start: Offset, end: Offset): (Offset, Offset) = + (skipBlanks(start - 1, -1) + 1, skipBlanks(end, 1)) /** Parse brace-enclosed `body` and rewrite it to be an indentation region instead, if possible. * If possible means: @@ -756,41 +751,112 @@ object Parsers { * 4. there is at least one token between the braces * 5. 
the closing brace is also at the end of the line, or it is followed by one of * `then`, `else`, `do`, `catch`, `finally`, `yield`, or `match`. - * 6. the opening brace does not follow a `=>`. The reason for this condition is that - * rewriting back to braces does not work after `=>` (since in most cases braces are omitted - * after a `=>` it would be annoying if braces were inserted). - */ - def bracesToIndented[T](body: => T, rewriteWithColon: Boolean): T = { + * 6. the opening brace does not follow a closing `}` + * 7. last token is not a leading operator + * 8. not a block in a sequence of statements + * 9. cannot rewrite to colon after a NEWLINE, e.g. + * true || + * { // NEWLINE inserted between || and { + * false + * } + */ + def bracesToIndented[T](body: => T, inStatSeq: Boolean, rewriteWithColon: Boolean): T = + import IndentRewriteState.* + val lastSaved = in.last.saveCopy + val lastOffsetSaved = in.lastOffset val underColonSyntax = possibleColonOffset == in.lastOffset val colonRequired = rewriteWithColon || underColonSyntax - val (startOpening, endOpening) = startingElimRegion(colonRequired) - val isOutermost = in.currentRegion.isOutermost - def allBraces(r: Region): Boolean = r match { - case r: Indented => r.isOutermost || allBraces(r.enclosing) - case r: InBraces => allBraces(r.enclosing) + val (startOpening, endOpening) = elimRegion(in.offset) + def isBracesOrIndented(r: Region): Boolean = r match + case r: Indented => true + case r: InBraces => true case _ => false - } - var canRewrite = allBraces(in.currentRegion) && // test (1) - !testChars(in.lastOffset - 3, " =>") // test(6) + var canRewrite = isBracesOrIndented(in.currentRegion) && // test (1) + lastSaved.token != RBRACE && // test (6) + !(lastSaved.isOperator && lastSaved.isAfterLineEnd) && // test (7) + !inStatSeq // test (8) val t = enclosed(LBRACE, { - canRewrite &= in.isAfterLineEnd // test (2) - val curOffset = in.offset - try body - finally { - canRewrite &= in.isAfterLineEnd && in.offset 
!= curOffset // test (3)(4) - } + if in.isAfterLineEnd && in.token != RBRACE then // test (2)(4) + toIndentedRegion: + try body + finally canRewrite &= in.isAfterLineEnd // test (3) + else + canRewrite = false + body }) - canRewrite &= (in.isAfterLineEnd || statCtdTokens.contains(in.token)) // test (5) + canRewrite &= (in.isAfterLineEnd || in.token == EOF || statCtdTokens.contains(in.token)) && // test (5) + (!colonRequired || !lastSaved.isNewLine) // test (9) if canRewrite && (!underColonSyntax || Feature.fewerBracesEnabled) then - val openingPatchStr = - if !colonRequired then "" - else if testChar(startOpening - 1, Chars.isOperatorPart(_)) then " :" - else ":" - val (startClosing, endClosing) = closingElimRegion() - patch(source, Span(startOpening, endOpening), openingPatchStr) - patch(source, Span(startClosing, endClosing), "") + val (startClosing, endClosing) = elimRegion(in.last.offset) + // patch over the added indentation to remove braces + patchOver(source, Span(startOpening, endOpening), "") + patchOver(source, Span(startClosing, endClosing), "") + if colonRequired then + if lastSaved.token == IDENTIFIER && lastSaved.isOperator then + patch(Span(lastSaved.offset, lastSaved.offset + lastSaved.name.length), s"`${lastSaved.name}`:") + else if lastSaved.token == IDENTIFIER && lastSaved.name.last == '_' then + patch(Span(lastOffsetSaved), " :") + else patch(Span(lastOffsetSaved), ":") + else + // no need to force outdentation after `}` + maximumIndent = None t - } + end bracesToIndented + + /** When rewriting to indent, make sure there is an indent after a `=>\n` */ + def indentedRegionAfterArrow[T](body: => T, inCaseDef: Boolean = false): T = + if in.rewriteToIndent && (inCaseDef || in.isAfterLineEnd) then + // assert(in.last.isArrow || in.last.token == SELFARROW) + toIndentedRegion(body) + else body + + /** compute required indentation to indent region properly */ + def toIndentedRegion[T](body: => T): T = + import IndentRewriteState.* + val enclosingIndent 
= minimumIndent + minimumIndent = + if enclosingIndent < in.currentRegion.indentWidth then + in.currentRegion.indentWidth + else if + in.token == CASE && ( + in.currentRegion.enclosing == null || + in.currentRegion.indentWidth == in.currentRegion.enclosing.indentWidth + ) + then enclosingIndent + else enclosingIndent.increment + try body + finally + maximumIndent = Some(minimumIndent) + minimumIndent = enclosingIndent + + /** when rewriting to indent, check that indentation is correct or patch */ + def patchIndent(): Unit = + if in.isAfterLineEnd && !in.isNewLine && in.token != OUTDENT && in.token != INDENT then + import IndentRewriteState.* + val currentIndent = in.indentWidth(in.offset) + val indentEndOffset = in.lineOffset + currentIndent.size + def isDotOrClosing = (closingParens + DOT).contains(in.token) + val needsOutdent = maximumIndent.exists: max => + currentIndent >= max || (!isDotOrClosing && currentIndent > minimumIndent) + val offByOne = + currentIndent != minimumIndent && currentIndent.isClose(minimumIndent) + if needsOutdent || !(currentIndent >= minimumIndent) || offByOne then + patch(Span(in.lineOffset, indentEndOffset), minimumIndent.toPrefix) + // no need to outdent anymore + if in.token != RBRACE then + maximumIndent = None + + def nextToken(): Unit = + if in.rewriteToIndent then patchIndent() + in.nextToken() + + def skipToken(): Offset = + if in.rewriteToIndent then patchIndent() + in.skipToken() + + def skipToken[T](res: T): T = + if in.rewriteToIndent then patchIndent() + in.skipToken(res) /** Drop (...) or { ... 
}, replacing the closing element with `endStr` */ def dropParensOrBraces(start: Offset, endStr: String): Unit = { @@ -803,7 +869,7 @@ object Parsers { val preFill = if (closingStartsLine || endStr.isEmpty) "" else " " val postFill = if (in.lastOffset == in.offset) " " else "" val (startClosing, endClosing) = - if (closingStartsLine && endStr.isEmpty) closingElimRegion() + if (closingStartsLine && endStr.isEmpty) elimRegion(in.last.offset) else (in.lastOffset - 1, in.lastOffset) patch(source, Span(startClosing, endClosing), s"$preFill$endStr$postFill") } @@ -1029,7 +1095,7 @@ object Parsers { colonAtEOLOpt() newLineOptWhenFollowing(canStartOperand) if isColonLambda then - in.nextToken() + nextToken() recur(expr(Location.InColonArg)) else if maybePostfix && !canStartOperand(in.token) then val topInfo = opStack.head @@ -1057,19 +1123,17 @@ object Parsers { /** Accept identifier and return its name as a term name. */ def ident(): TermName = - if (isIdent) { + if isIdent then val name = in.name if name == nme.CONSTRUCTOR || name == nme.STATIC_CONSTRUCTOR then report.error( em"""Illegal backquoted identifier: `` and `` are forbidden""", in.sourcePos()) - in.nextToken() + nextToken() name - } - else { + else syntaxErrorOrIncomplete(ExpectedTokenButFound(IDENTIFIER, in.token)) nme.ERROR - } /** Accept identifier and return Ident with its name as a term name. */ def termIdent(): Ident = @@ -1103,7 +1167,7 @@ object Parsers { /** DotSelectors ::= { `.' 
id } */ def dotSelectors(t: Tree): Tree = - if (in.token == DOT) { in.nextToken(); dotSelectors(selector(t)) } + if (in.token == DOT) { nextToken(); dotSelectors(selector(t)) } else t private val id: Tree => Tree = x => x @@ -1116,11 +1180,11 @@ object Parsers { val start = in.offset def handleThis(qual: Ident) = - in.nextToken() + nextToken() atSpan(start) { This(qual) } def handleSuper(qual: Ident) = - in.nextToken() + nextToken() val mix = mixinQualifierOpt() val t = atSpan(start) { Super(This(qual), mix) } accept(DOT) @@ -1134,10 +1198,10 @@ object Parsers { def qual = cpy.Ident(t)(t.name.toTypeName) in.lookahead.token match case THIS => - in.nextToken() + nextToken() handleThis(qual) case SUPER => - in.nextToken() + nextToken() handleSuper(qual) case _ => t else t @@ -1172,7 +1236,7 @@ object Parsers { def simpleLiteral(): Tree = if isIdent(nme.raw.MINUS) then val start = in.offset - in.nextToken() + nextToken() literal(negOffset = start, inTypeOrSingleton = true) else literal(inTypeOrSingleton = true) @@ -1228,7 +1292,7 @@ object Parsers { syntaxErrorOrIncomplete(IllegalLiteral()) atSpan(negOffset) { Literal(Constant(null)) } } - in.nextToken() + nextToken() t } else atSpan(negOffset) { @@ -1242,7 +1306,7 @@ object Parsers { case _ => Ident(in.name) } } - in.nextToken() + nextToken() Quote(t, Nil) } else @@ -1258,11 +1322,11 @@ object Parsers { if migrateTo3 then patch(source, Span(in.offset, in.offset + 1), "Symbol(\"") patch(source, Span(in.charOffset - 1), "\")") - atSpan(in.skipToken()) { SymbolLit(in.strVal) } + atSpan(skipToken()) { SymbolLit(in.strVal) } else if (in.token == INTERPOLATIONID) interpolatedString(inPattern) else { val t = literalOf(in.token) - in.nextToken() + nextToken() t } } @@ -1275,7 +1339,7 @@ object Parsers { in.charOffset + 1 < in.buf.length && in.buf(in.charOffset) == '"' && in.buf(in.charOffset + 1) == '"' - in.nextToken() + nextToken() def nextSegment(literalOffset: Offset) = segmentBuf += Thicket( literal(literalOffset, 
inPattern = inPattern, inStringInterpolation = true), @@ -1283,11 +1347,11 @@ object Parsers { if (in.token == IDENTIFIER) termIdent() else if (in.token == USCORE && inPattern) { - in.nextToken() + nextToken() Ident(nme.WILDCARD) } else if (in.token == THIS) { - in.nextToken() + nextToken() This(EmptyTypeIdent) } else if (in.token == LBRACE) @@ -1312,21 +1376,21 @@ object Parsers { /* ------------- NEW LINES ------------------------------------------------- */ def newLineOpt(): Unit = - if (in.token == NEWLINE) in.nextToken() + if in.token == NEWLINE then nextToken() def newLinesOpt(): Unit = - if in.isNewLine then in.nextToken() + if in.isNewLine then nextToken() def newLineOptWhenFollowedBy(token: Int): Unit = // note: next is defined here because current == NEWLINE - if (in.token == NEWLINE && in.next.token == token) in.nextToken() + if in.token == NEWLINE && in.next.token == token then nextToken() def newLinesOptWhenFollowedBy(token: Int): Unit = - if in.isNewLine && in.next.token == token then in.nextToken() + if in.isNewLine && in.next.token == token then nextToken() def newLinesOptWhenFollowedBy(name: Name): Unit = if in.isNewLine && in.next.token == IDENTIFIER && in.next.name == name then - in.nextToken() + nextToken() def newLineOptWhenFollowing(p: Int => Boolean): Unit = // note: next is defined here because current == NEWLINE @@ -1337,16 +1401,16 @@ object Parsers { syntaxErrorOrIncomplete(em"indented definitions expected, ${in} found") def colonAtEOLOpt(): Unit = - possibleColonOffset = in.lastOffset + IndentRewriteState.possibleColonOffset = in.lastOffset in.observeColonEOL(inTemplate = false) if in.token == COLONeol then - in.nextToken() + nextToken() acceptIndent() def argumentStart(): Unit = colonAtEOLOpt() if migrateTo3 && in.token == NEWLINE && in.next.token == LBRACE then - in.nextToken() + nextToken() if in.indentWidth(in.offset) == in.currentRegion.indentWidth then report.errorOrMigrationWarning( em"""This opening brace will start a new 
statement in Scala 3. @@ -1360,7 +1424,7 @@ object Parsers { if in.token == COLONeol then if in.lookahead.token == END then in.token = NEWLINE else - in.nextToken() + nextToken() if in.token != LBRACE then acceptIndent() else newLineOptWhenFollowedBy(LBRACE) @@ -1400,11 +1464,11 @@ object Parsers { didMatch if in.token == END then - val start = in.skipToken() + val start = skipToken() if stats.isEmpty || !matchesAndSetEnd(stats.last) then syntaxError(em"misaligned end marker", Span(start, in.lastCharOffset)) in.token = IDENTIFIER // Leaving it as the original token can confuse newline insertion - in.nextToken() + nextToken() end checkEndMarker /* ------------- TYPES ------------------------------------------------------ */ @@ -1496,10 +1560,10 @@ object Parsers { imods |= Impure if token == CTXARROW then - in.nextToken() + nextToken() imods |= Given else if token == ARROW || token == TLARROW then - in.nextToken() + nextToken() else accept(ARROW) @@ -1524,16 +1588,16 @@ object Parsers { val t = if (in.token == LPAREN) { - in.nextToken() + nextToken() if (in.token == RPAREN) { - in.nextToken() + nextToken() functionRest(Nil) } else { val paramStart = in.offset def addErased() = erasedArgs.addOne(isErasedKw) - if isErasedKw then { in.skipToken(); } + if isErasedKw then { skipToken(); } addErased() val ts = in.currentRegion.withCommasExpected { funArgType() match @@ -1579,9 +1643,9 @@ object Parsers { val start = in.offset val tparams = typeParamClause(ParamOwner.TypeParam) if (in.token == TLARROW) - atSpan(start, in.skipToken())(LambdaTypeTree(tparams, toplevelTyp())) + atSpan(start, skipToken())(LambdaTypeTree(tparams, toplevelTyp())) else if (in.token == ARROW || isPureArrow(nme.PUREARROW)) { - val arrowOffset = in.skipToken() + val arrowOffset = skipToken() val body = toplevelTyp() atSpan(start, arrowOffset) { getFunction(body) match { @@ -1701,7 +1765,7 @@ object Parsers { }) else if Feature.ccEnabled && in.isIdent(nme.UPARROW) && isCaptureUpArrow then val 
upArrowStart = in.offset - in.nextToken() + nextToken() def cs = if in.token == LBRACE then captureSet() else atSpan(upArrowStart)(captureRoot) :: Nil @@ -1717,7 +1781,7 @@ object Parsers { def withTypeRest(t: Tree): Tree = if in.token == WITH then val withOffset = in.offset - in.nextToken() + nextToken() if in.token == LBRACE || in.token == INDENT then t else @@ -1753,12 +1817,13 @@ object Parsers { val inPattern = (staged & StageKind.QuotedPattern) != 0 val expr = if (in.name.length == 1) { - in.nextToken() + nextToken() + val inPattern = (staged & StageKind.QuotedPattern) != 0 withinStaged(StageKind.Spliced)(if (inPattern) inBraces(pattern()) else stagedBlock()) } else atSpan(in.offset + 1) { val id = Ident(in.name.drop(1)) - in.nextToken() + nextToken() id } if isType then @@ -1786,23 +1851,23 @@ object Parsers { SingletonTypeTree(simpleLiteral()) else if in.token == USCORE then if ctx.settings.YkindProjector.value == "underscores" then - val start = in.skipToken() + val start = skipToken() Ident(tpnme.USCOREkw).withSpan(Span(start, in.lastOffset, start)) else if sourceVersion.isAtLeast(future) then deprecationWarning(em"`_` is deprecated for wildcard arguments of types: use `?` instead") patch(source, Span(in.offset, in.offset + 1), "?") - val start = in.skipToken() + val start = skipToken() typeBounds().withSpan(Span(start, in.lastOffset, start)) // Allow symbols -_ and +_ through for compatibility with code written using kind-projector in Scala 3 underscore mode. // While these signify variant type parameters in Scala 2 + kind-projector, we ignore their variance markers since variance is inferred. else if (isIdent(nme.MINUS) || isIdent(nme.PLUS)) && in.lookahead.token == USCORE && ctx.settings.YkindProjector.value == "underscores" then val identName = in.name.toTypeName ++ nme.USCOREkw - val start = in.skipToken() - in.nextToken() + val start = skipToken() + nextToken() Ident(identName).withSpan(Span(start, in.lastOffset, start)) else if isIdent(nme.?) 
then - val start = in.skipToken() + val start = skipToken() typeBounds().withSpan(Span(start, in.lastOffset, start)) else def singletonArgs(t: Tree): Tree = @@ -1832,9 +1897,9 @@ object Parsers { else def singletonCompletion(t: Tree): Tree = if in.token == DOT then - in.nextToken() + nextToken() if in.token == TYPE then - in.nextToken() + nextToken() atSpan(startOffset(t)) { SingletonTypeTree(t) } else singletonCompletion(selector(t)) @@ -1924,7 +1989,7 @@ object Parsers { in.currentRegion.withCommasExpected { argType() match { case Ident(name) if in.token == EQUALS => - in.nextToken() + nextToken() commaSeparatedRest(NamedArg(name, argType()), () => namedTypeArg()) case firstArg => commaSeparatedRest(firstArg, () => argType()) @@ -1936,7 +2001,7 @@ object Parsers { def paramTypeOf(core: () => Tree): Tree = if in.token == ARROW || isPureArrow(nme.PUREARROW) then val isImpure = in.token == ARROW - atSpan(in.skipToken()): + atSpan(skipToken()): val tp = if isImpure then core() else capturesAndResult(core) if isImpure && Feature.pureFunsEnabled then ImpureByNameTypeTree(tp) else ByNameTypeTree(tp) @@ -1947,7 +2012,7 @@ object Parsers { if in.isIdent(nme.into) && in.featureEnabled(Feature.into) && canStartTypeTokens.contains(in.lookahead.token) - then atSpan(in.skipToken()) { Into(tp()) } + then atSpan(skipToken()) { Into(tp()) } else tp() /** FunArgType ::= Type @@ -1967,7 +2032,7 @@ object Parsers { def paramValueType(): Tree = { val t = maybeInto(toplevelTyp) if (isIdent(nme.raw.STAR)) { - in.nextToken() + nextToken() atSpan(startOffset(t)) { PostfixOp(t, Ident(tpnme.raw.STAR)) } } else t @@ -1992,7 +2057,7 @@ object Parsers { atSpan(in.offset) { TypeBoundsTree(bound(SUPERTYPE), bound(SUBTYPE)) } private def bound(tok: Int): Tree = - if (in.token == tok) { in.nextToken(); toplevelTyp() } + if (in.token == tok) { nextToken(); toplevelTyp() } else EmptyTree /** TypeParamBounds ::= TypeBounds {`<%' Type} {`:' Type} @@ -2006,21 +2071,21 @@ object Parsers { def 
contextBounds(pname: TypeName): List[Tree] = if in.isColon then - atSpan(in.skipToken()) { + atSpan(skipToken()) { AppliedTypeTree(toplevelTyp(), Ident(pname)) } :: contextBounds(pname) else if in.token == VIEWBOUND then report.errorOrMigrationWarning( em"view bounds `<%' are no longer supported, use a context bound `:' instead", in.sourcePos(), from = `3.0`) - atSpan(in.skipToken()) { + atSpan(skipToken()) { Function(Ident(pname) :: Nil, toplevelTyp()) } :: contextBounds(pname) else Nil def typedOpt(): Tree = - if in.isColon then { in.nextToken(); toplevelTyp() } + if in.isColon then { nextToken(); toplevelTyp() } else TypeTree().withSpan(Span(in.lastOffset)) def typeDependingOn(location: Location): Tree = @@ -2108,7 +2173,7 @@ object Parsers { def subExpr() = subPart(expr) - def expr(location: Location): Tree = { + def expr(location: Location, inStatSeq: Boolean = false): Tree = { val start = in.offset in.token match case IMPLICIT => @@ -2136,7 +2201,7 @@ object Parsers { else new WildcardFunction(placeholderParams.reverse, t) finally placeholderParams = saved - val t = expr1(location) + val t = expr1(location, inStatSeq) if in.isArrow then placeholderParams = Nil // don't interpret `_' to the left of `=>` as placeholder wrapPlaceholders(closureRest(start, location, convertToParams(t))) @@ -2148,11 +2213,11 @@ object Parsers { wrapPlaceholders(t) } - def expr1(location: Location = Location.ElseWhere): Tree = in.token match + def expr1(location: Location = Location.ElseWhere, inStatSeq: Boolean = false): Tree = in.token match case IF => ifExpr(in.offset, If) case WHILE => - atSpan(in.skipToken()) { + atSpan(skipToken()) { val cond = condExpr(DO) newLinesOpt() val body = subExpr() @@ -2163,10 +2228,10 @@ object Parsers { em"""`do while ` is no longer supported, |use `while ; do ()` instead.${rewriteNotice()}""", in.sourcePos(), from = `3.0`) - val start = in.skipToken() + val start = skipToken() atSpan(start) { val body = expr() - if (isStatSep) in.nextToken() + if 
(isStatSep) nextToken() val whileStart = in.offset accept(WHILE) val cond = expr() @@ -2184,12 +2249,12 @@ object Parsers { } case TRY => val tryOffset = in.offset - atSpan(in.skipToken()) { + atSpan(skipToken()) { val body = expr() val (handler, handlerStart) = if in.token == CATCH then val span = in.offset - in.nextToken() + nextToken() (if in.token == CASE then Match(EmptyTree, caseClause(exprOnly = true) :: Nil) else subExpr(), span) @@ -2207,7 +2272,7 @@ object Parsers { val finalizer = if (in.token == FINALLY) { - in.nextToken(); + nextToken(); val expr = subExpr() if expr.span.exists then expr else Literal(Constant(())) // finally without an expression @@ -2223,9 +2288,9 @@ object Parsers { ParsedTry(body, handler, finalizer) } case THROW => - atSpan(in.skipToken()) { Throw(expr()) } + atSpan(skipToken()) { Throw(expr()) } case RETURN => - atSpan(in.skipToken()) { + atSpan(skipToken()) { Return(if (isExprIntro) expr() else EmptyTree, EmptyTree) } case FOR => @@ -2235,7 +2300,7 @@ object Parsers { && !in.inModifierPosition() && in.canStartExprTokens.contains(in.lookahead.token) then - val start = in.skipToken() + val start = skipToken() in.token match case IF => ifExpr(start, InlineIf) @@ -2246,21 +2311,21 @@ object Parsers { case t => syntaxError(em"`inline` must be followed by an `if` or a `match`", start) t - else expr1Rest(postfixExpr(location), location) + else expr1Rest(postfixExpr(location, inStatSeq), location) end expr1 def expr1Rest(t: Tree, location: Location): Tree = if in.token == EQUALS then t match case Ident(_) | Select(_, _) | Apply(_, _) | PrefixOp(_, _) => - atSpan(startOffset(t), in.skipToken()) { + atSpan(startOffset(t), skipToken()) { val loc = if location.inArgs then location else Location.ElseWhere Assign(t, subPart(() => expr(loc))) } case _ => t else if in.isColon then - in.nextToken() + nextToken() ascription(t, location) else t @@ -2268,9 +2333,9 @@ object Parsers { def ascription(t: Tree, location: Location): Tree = 
atSpan(startOffset(t)) { in.token match { case USCORE if in.lookahead.isIdent(nme.raw.STAR) => - val uscoreStart = in.skipToken() + val uscoreStart = skipToken() val isVarargSplice = location.inArgs && followingIsVararg() - in.nextToken() + nextToken() if isVarargSplice then report.errorOrMigrationWarning( em"The syntax `x: _*` is no longer supported for vararg splices; use `x*` instead${rewriteNotice(`future-migration`)}", @@ -2303,11 +2368,11 @@ object Parsers { * `if' Expr `then' Expr [[semi] else Expr] */ def ifExpr(start: Offset, mkIf: (Tree, Tree, Tree) => If): If = - atSpan(start, in.skipToken()) { + atSpan(start, skipToken()) { val cond = condExpr(THEN) newLinesOpt() val thenp = subExpr() - val elsep = if (in.token == ELSE) { in.nextToken(); subExpr() } + val elsep = if (in.token == ELSE) { nextToken(); subExpr() } else EmptyTree mkIf(cond, thenp, elsep) } @@ -2315,7 +2380,7 @@ object Parsers { /** MatchClause ::= `match' `{' CaseClauses `}' */ def matchClause(t: Tree): Match = - atSpan(startOffset(t), in.skipToken()) { + atSpan(startOffset(t), skipToken()) { Match(t, inBracesOrIndented(caseClauses(() => caseClause()))) } @@ -2333,7 +2398,7 @@ object Parsers { */ def funParams(mods: Modifiers, location: Location): List[Tree] = if in.token == LPAREN then - in.nextToken() + nextToken() if in.token == RPAREN then Nil else @@ -2350,7 +2415,7 @@ object Parsers { em"This syntax is no longer supported; parameter needs to be enclosed in (...)${rewriteNotice(`future-migration`)}", source.atSpan(Span(start, in.lastOffset)), from = future) - in.nextToken() + nextToken() val t = infixType() if (sourceVersion == `future-migration`) { patch(source, Span(start), "(") @@ -2372,7 +2437,7 @@ object Parsers { def bindingName(): TermName = if (in.token == USCORE) { - in.nextToken() + nextToken() WildcardParamName.fresh() } else ident() @@ -2388,10 +2453,10 @@ object Parsers { if in.token == CTXARROW then if params.isEmpty then syntaxError(em"context function literals require 
at least one formal parameter", Span(start, in.lastOffset)) - in.nextToken() + nextToken() else accept(ARROW) - val body = + val body = indentedRegionAfterArrow: if location == Location.InBlock then block() else if location == Location.InColonArg && in.token == INDENT then blockExpr() else expr() @@ -2404,21 +2469,21 @@ object Parsers { * | InfixExpr id ColonArgument * | InfixExpr MatchClause */ - def postfixExpr(location: Location = Location.ElseWhere): Tree = - val t = postfixExprRest(prefixExpr(location), location) + def postfixExpr(location: Location = Location.ElseWhere, inStatSeq: Boolean = false): Tree = + val t = postfixExprRest(prefixExpr(location, inStatSeq), location) if location.inArgs && followingIsVararg() then - Typed(t, atSpan(in.skipToken()) { Ident(tpnme.WILDCARD_STAR) }) + Typed(t, atSpan(skipToken()) { Ident(tpnme.WILDCARD_STAR) }) else t def postfixExprRest(t: Tree, location: Location): Tree = - infixOps(t, in.canStartExprTokens, prefixExpr, location, ParseKind.Expr, + infixOps(t, in.canStartExprTokens, prefixExpr(_), location, ParseKind.Expr, isOperator = !(location.inArgs && followingIsVararg())) /** PrefixExpr ::= [PrefixOperator'] SimpleExpr * PrefixOperator ::= ‘-’ | ‘+’ | ‘~’ | ‘!’ (if not backquoted) */ - val prefixExpr: Location => Tree = location => + def prefixExpr(location: Location, inStatSeq: Boolean = false): Tree = if in.token == IDENTIFIER && nme.raw.isUnary(in.name) && in.canStartExprTokens.contains(in.lookahead.token) then @@ -2428,7 +2493,7 @@ object Parsers { simpleExprRest(literal(start), location, canApply = true) else atSpan(start) { PrefixOp(op, simpleExpr(location)) } - else simpleExpr(location) + else simpleExpr(location, inStatSeq) /** SimpleExpr ::= ‘new’ ConstrApp {`with` ConstrApp} [TemplateBody] * | ‘new’ TemplateBody @@ -2454,7 +2519,7 @@ object Parsers { * Quoted ::= ‘'’ ‘{’ Block ‘}’ * | ‘'’ ‘[’ Type ‘]’ */ - def simpleExpr(location: Location): Tree = { + def simpleExpr(location: Location, inStatSeq: Boolean = 
false): Tree = { var canApply = true val t = in.token match { case XMLSTART => @@ -2465,7 +2530,7 @@ object Parsers { case BACKQUOTED_IDENT | THIS | SUPER => simpleRef() case USCORE => - val start = in.skipToken() + val start = skipToken() val pname = WildcardParamName.fresh() val param = ValDef(pname, TypeTree(), EmptyTree).withFlags(SyntheticTermParam) .withSpan(Span(start)) @@ -2475,9 +2540,9 @@ object Parsers { atSpan(in.offset) { makeTupleOrParens(inParens(exprsInParensOrBindings())) } case LBRACE | INDENT => canApply = false - blockExpr() + blockExpr(inStatSeq) case QUOTE => - atSpan(in.skipToken()) { + atSpan(skipToken()) { withinStaged(StageKind.Quoted | (if (location.inPattern) StageKind.QuotedPattern else 0)) { val body = if (in.token == LBRACKET) inBrackets(typ()) @@ -2489,14 +2554,14 @@ object Parsers { canApply = false newExpr() case MACRO => - val start = in.skipToken() + val start = skipToken() MacroTree(simpleExpr(Location.ElseWhere)) case _ => if isLiteral then literal() else if in.isColon then syntaxError(IllegalStartSimpleExpr(tokenString(in.token))) - in.nextToken() + nextToken() simpleExpr(location) else val start = in.lastOffset @@ -2510,7 +2575,7 @@ object Parsers { if (canApply) argumentStart() in.token match case DOT => - in.nextToken() + nextToken() simpleExprRest(selectorOrMatch(t), location, canApply = true) case LBRACKET => val tapp = atSpan(startOffset(t), in.offset) { TypeApply(t, typeArgs(namedOK = true, wildOK = false)) } @@ -2519,7 +2584,7 @@ object Parsers { val app = atSpan(startOffset(t), in.offset) { mkApply(t, argumentExprs()) } simpleExprRest(app, location, canApply = true) case USCORE => - atSpan(startOffset(t), in.skipToken()) { PostfixOp(t, Ident(nme.WILDCARD)) } + atSpan(startOffset(t), skipToken()) { PostfixOp(t, Ident(nme.WILDCARD)) } case _ => if in.isColon && location == Location.InParens && followingIsLambdaParams() then t match @@ -2532,7 +2597,7 @@ object Parsers { } case _ => t else if isColonLambda then - val app 
= atSpan(startOffset(t), in.skipToken()) { + val app = atSpan(startOffset(t), skipToken()) { Apply(t, expr(Location.InColonArg) :: Nil) } simpleExprRest(app, location, canApply = true) @@ -2543,7 +2608,7 @@ object Parsers { * | ‘new’ TemplateBody */ def newExpr(): Tree = - val start = in.skipToken() + val start = skipToken() def reposition(t: Tree) = t.withSpan(Span(start, in.lastOffset)) possibleTemplateStart() val parents = @@ -2581,7 +2646,7 @@ object Parsers { if in.token == RPAREN then (Nil, false) else if isIdent(nme.using) then - in.nextToken() + nextToken() (commaSeparated(argumentExpr), true) else (commaSeparated(argumentExpr), false) @@ -2639,12 +2704,12 @@ object Parsers { /** BlockExpr ::= <<< (CaseClauses | Block) >>> */ - def blockExpr(): Tree = atSpan(in.offset) { + def blockExpr(inStatSeq: Boolean = false): Tree = atSpan(in.offset) { val simplify = in.token == INDENT - inDefScopeBraces { + inDefScopeBraces({ if (in.token == CASE) Match(EmptyTree, caseClauses(() => caseClause())) else block(simplify) - } + }, inStatSeq = inStatSeq) } /** Block ::= BlockStatSeq @@ -2665,7 +2730,7 @@ object Parsers { /** Guard ::= if PostfixExpr */ def guard(): Tree = - if (in.token == IF) { in.nextToken(); postfixExpr(Location.InGuard) } + if (in.token == IF) { nextToken(); postfixExpr(Location.InGuard) } else EmptyTree /** Enumerators ::= Generator {semi Enumerator | Guard} @@ -2674,7 +2739,7 @@ object Parsers { def enumeratorsRest(): List[Tree] = if (isStatSep) { - in.nextToken() + nextToken() if (in.token == DO || in.token == YIELD || in.token == RBRACE) Nil else enumerator() :: enumeratorsRest() } @@ -2691,14 +2756,14 @@ object Parsers { else if (in.token == CASE) generator() else { val pat = pattern1() - if (in.token == EQUALS) atSpan(startOffset(pat), in.skipToken()) { GenAlias(pat, subExpr()) } + if (in.token == EQUALS) atSpan(startOffset(pat), skipToken()) { GenAlias(pat, subExpr()) } else generatorRest(pat, casePat = false) } /** Generator ::= [‘case’] 
Pattern `<-' Expr */ def generator(): Tree = { - val casePat = if (in.token == CASE) { in.nextToken(); true } else false + val casePat = if (in.token == CASE) { nextToken(); true } else false generatorRest(pattern1(), casePat) } @@ -2717,14 +2782,14 @@ object Parsers { * | ‘for’ Enumerators (‘do‘ | ‘yield’) Expr */ def forExpr(): Tree = - atSpan(in.skipToken()) { + atSpan(skipToken()) { var wrappedEnums = true val start = in.offset val forEnd = in.lastOffset val leading = in.token val enums = if (leading == LBRACE || leading == LPAREN && followingIsEnclosedGenerators()) { - in.nextToken() + nextToken() val res = if (leading == LBRACE || in.token == CASE) enumerators() @@ -2776,12 +2841,12 @@ object Parsers { } newLinesOpt() if (in.token == YIELD) { - in.nextToken() + nextToken() ForYield(enums, subExpr()) } else if (in.token == DO) { if (rewriteToOldSyntax()) dropTerminator() - in.nextToken() + nextToken() ForDo(enums, subExpr()) } else { @@ -2809,14 +2874,16 @@ object Parsers { (pattern(), guard()) } CaseDef(pat, grd, atSpan(accept(ARROW)) { - if exprOnly then - if in.indentSyntax && in.isAfterLineEnd && in.token != INDENT then - warning(em"""Misleading indentation: this expression forms part of the preceding catch case. - |If this is intended, it should be indented for clarity. - |Otherwise, if the handler is intended to be empty, use a multi-line catch with - |an indented case.""") - expr() - else block() + indentedRegionAfterArrow({ + if exprOnly then + if in.indentSyntax && in.isAfterLineEnd && in.token != INDENT then + warning(em"""Misleading indentation: this expression forms part of the preceding catch case. + |If this is intended, it should be indented for clarity. 
+ |Otherwise, if the handler is intended to be empty, use a multi-line catch with + |an indented case.""") + expr() + else block() + }, inCaseDef = true) }) } @@ -2827,15 +2894,15 @@ object Parsers { accept(CASE) in.token match { case USCORE if in.lookahead.isArrow => - val start = in.skipToken() + val start = skipToken() Ident(tpnme.WILDCARD).withSpan(Span(start, in.lastOffset, start)) case _ => rejectWildcardType(infixType()) } } CaseDef(pat, EmptyTree, atSpan(accept(ARROW)) { - val t = rejectWildcardType(typ()) - if in.token == SEMI then in.nextToken() + val t = indentedRegionAfterArrow(rejectWildcardType(typ()), inCaseDef = true) + if in.token == SEMI then nextToken() newLinesOptWhenFollowedBy(CASE) t }) @@ -2852,7 +2919,7 @@ object Parsers { else pat def patternAlts(location: Location): List[Tree] = - if (isIdent(nme.raw.BAR)) { in.nextToken(); pattern1(location) :: patternAlts(location) } + if (isIdent(nme.raw.BAR)) { nextToken(); pattern1(location) :: patternAlts(location) } else Nil /** Pattern1 ::= PatVar Ascription @@ -2874,7 +2941,7 @@ object Parsers { warnFrom = `3.3`, errorFrom = future ) - in.nextToken() + nextToken() ascription(p, location) else p @@ -2884,7 +2951,7 @@ object Parsers { def pattern3(): Tree = val p = infixPattern() if followingIsVararg() then - val start = in.skipToken() + val start = skipToken() p match case p @ Ident(name) if name.isVarPattern => Typed(p, atSpan(start) { Ident(tpnme.WILDCARD_STAR) }) @@ -2897,7 +2964,7 @@ object Parsers { */ val pattern2: () => Tree = () => pattern3() match case p @ Ident(name) if in.token == AT => - val offset = in.skipToken() + val offset = skipToken() pattern3() match { case pt @ Bind(nme.WILDCARD, pt1: Typed) if pt.mods.is(Given) => atSpan(startOffset(p), 0) { Bind(name, pt1).withMods(pt.mods) } @@ -2948,7 +3015,7 @@ object Parsers { xmlLiteralPattern() case GIVEN => atSpan(in.offset) { - val givenMod = atSpan(in.skipToken())(Mod.Given()) + val givenMod = atSpan(skipToken())(Mod.Given()) val 
typed = Typed(Ident(nme.WILDCARD), refinedType()) Bind(nme.WILDCARD, typed).withMods(addMod(Modifiers(), givenMod)) } @@ -2965,7 +3032,7 @@ object Parsers { def simplePatternRest(t: Tree): Tree = if in.token == DOT then - in.nextToken() + nextToken() simplePatternRest(selector(t)) else var p = t @@ -3026,7 +3093,7 @@ object Parsers { private def addModifier(mods: Modifiers): Modifiers = { val tok = in.token val name = in.name - val mod = atSpan(in.skipToken()) { modOfToken(tok, name) } + val mod = atSpan(skipToken()) { modOfToken(tok, name) } if (mods.isOneOf(mod.flags)) syntaxError(RepeatedModifier(mod.flags.flagsString)) addMod(mods, mod) @@ -3051,7 +3118,7 @@ object Parsers { if sourceVersion.isAtLeast(future) then deprecationWarning( em"The [this] qualifier will be deprecated in the future; it should be dropped.") - in.nextToken() + nextToken() mods | Local else mods.withPrivateWithin(ident().toTypeName) } @@ -3080,7 +3147,7 @@ object Parsers { val mods1 = addModifier(mods) loop(if (isAccessMod) accessQualifierOpt(mods1) else mods1) else if (in.isNewLine && (mods.hasFlags || mods.hasAnnotations)) { - in.nextToken() + nextToken() loop(mods) } else @@ -3180,7 +3247,7 @@ object Parsers { def checkVarianceOK(): Boolean = val ok = ownerKind != ParamOwner.Def && ownerKind != ParamOwner.TypeParam if !ok then syntaxError(em"no `+/-` variance annotation allowed here") - in.nextToken() + nextToken() ok def typeParam(): TypeDef = { @@ -3191,7 +3258,7 @@ object Parsers { if Feature.ccEnabled && in.token == SEALED then if ownerKind == ParamOwner.Def then mods |= Sealed else syntaxError(em"`sealed` modifier only allowed for method type parameters") - in.nextToken() + nextToken() if isIdent(nme.raw.PLUS) && checkVarianceOK() then mods |= Covariant else if isIdent(nme.raw.MINUS) && checkVarianceOK() then @@ -3199,7 +3266,7 @@ object Parsers { atSpan(start, nameStart) { val name = if (isAbstractOwner && in.token == USCORE) { - in.nextToken() + nextToken() 
WildcardParamName.fresh().toTypeName } else ident().toTypeName @@ -3251,7 +3318,7 @@ object Parsers { ): List[ValDef] = { var impliedMods: Modifiers = EmptyModifiers - def addParamMod(mod: () => Mod) = impliedMods = addMod(impliedMods, atSpan(in.skipToken()) { mod() }) + def addParamMod(mod: () => Mod) = impliedMods = addMod(impliedMods, atSpan(skipToken()) { mod() }) def paramMods() = if in.token == IMPLICIT then @@ -3269,10 +3336,10 @@ object Parsers { mods = addFlag(modifiers(start = mods), ParamAccessor) mods = if in.token == VAL then - in.nextToken() + nextToken() mods else if in.token == VAR then - val mod = atSpan(in.skipToken()) { Mod.Var() } + val mod = atSpan(skipToken()) { Mod.Var() } addMod(mods, mod) else if (!(mods.flags &~ (ParamAccessor | Inline | Erased | impliedMods.flags)).isEmpty) @@ -3293,7 +3360,7 @@ object Parsers { // needed?, it's checked later anyway val tpt = paramType() val default = - if (in.token == EQUALS) { in.nextToken(); subExpr() } + if (in.token == EQUALS) { nextToken(); subExpr() } else EmptyTree if (impliedMods.mods.nonEmpty) impliedMods = impliedMods.withMods(Nil) // keep only flags, so that parameter positions don't overlap @@ -3453,12 +3520,12 @@ object Parsers { in.sourcePos(), from = future) patch(source, Span(in.offset, in.offset + 1), "*") - ImportSelector(atSpan(in.skipToken()) { Ident(nme.WILDCARD) }) + ImportSelector(atSpan(skipToken()) { Ident(nme.WILDCARD) }) /** 'given [InfixType]' */ def givenSelector() = ImportSelector( - atSpan(in.skipToken()) { Ident(nme.EMPTY) }, + atSpan(skipToken()) { Ident(nme.EMPTY) }, bound = if canStartInfixTypeTokens.contains(in.token) then rejectWildcardType(infixType()) else EmptyTree) @@ -3474,7 +3541,7 @@ object Parsers { patch(source, Span(in.offset, in.offset + 2), if testChar(in.offset - 1, ' ') && testChar(in.offset + 2, ' ') then "as" else " as ") - atSpan(startOffset(from), in.skipToken()) { + atSpan(startOffset(from), skipToken()) { val to = if in.token == USCORE then 
wildcardIdent() else termIdent() ImportSelector(from, if to.name == nme.ERROR then EmptyTree else to) } @@ -3545,16 +3612,16 @@ object Parsers { */ def defOrDcl(start: Int, mods: Modifiers): Tree = in.token match { case VAL => - in.nextToken() + nextToken() patDefOrDcl(start, mods) case VAR => - val mod = atSpan(in.skipToken()) { Mod.Var() } + val mod = atSpan(skipToken()) { Mod.Var() } val mod1 = addMod(mods, mod) patDefOrDcl(start, mod1) case DEF => - defDefOrDcl(start, in.skipToken(mods)) + defDefOrDcl(start, skipToken(mods)) case TYPE => - typeDefOrDcl(start, in.skipToken(mods)) + typeDefOrDcl(start, skipToken(mods)) case CASE if inEnum => enumCase(start, mods) case _ => @@ -3572,7 +3639,7 @@ object Parsers { val first = pattern2() var lhs = first match { case id: Ident if in.token == COMMA => - in.nextToken() + nextToken() id :: commaSeparated(() => termIdent()) case _ => first :: Nil @@ -3639,7 +3706,7 @@ object Parsers { false if (in.token == THIS) { - in.nextToken() + nextToken() val vparamss = termParamClauses(numLeadParams = numLeadParams) if (vparamss.isEmpty || vparamss.head.take(1).exists(_.mods.isOneOf(GivenOrImplicit))) in.token match { @@ -3673,7 +3740,7 @@ object Parsers { if (migrateTo3) newLineOptWhenFollowedBy(LBRACE) val rhs = if in.token == EQUALS then - in.nextToken() + nextToken() subExpr() else if !tpt.isEmpty then EmptyTree @@ -3700,8 +3767,11 @@ object Parsers { atSpan(in.offset) { inBracesOrIndented { val stats = selfInvocation() :: ( - if (isStatSep) { in.nextToken(); blockStatSeq() } - else Nil) + if isStatSep then + nextToken() + blockStatSeq() + else Nil + ) Block(stats, Literal(Constant(()))) } } @@ -3732,12 +3802,12 @@ object Parsers { } in.token match { case EQUALS => - in.nextToken() + nextToken() makeTypeDef(toplevelTyp()) case SUBTYPE | SUPERTYPE => val bounds = typeBounds() if (in.token == EQUALS) { - val eqOffset = in.skipToken() + val eqOffset = skipToken() var rhs = toplevelTyp() rhs match { case mtt: MatchTypeTree => @@ 
-3773,19 +3843,19 @@ object Parsers { def tmplDef(start: Int, mods: Modifiers): Tree = in.token match { case TRAIT => - classDef(start, in.skipToken(addFlag(mods, Trait))) + classDef(start, skipToken(addFlag(mods, Trait))) case CLASS => - classDef(start, in.skipToken(mods)) + classDef(start, skipToken(mods)) case CASECLASS => - classDef(start, in.skipToken(mods | Case)) + classDef(start, skipToken(mods | Case)) case OBJECT => - objectDef(start, in.skipToken(mods | Module)) + objectDef(start, skipToken(mods | Module)) case CASEOBJECT => - objectDef(start, in.skipToken(mods | Case | Module)) + objectDef(start, skipToken(mods | Case | Module)) case ENUM => - enumDef(start, in.skipToken(mods | Enum)) + enumDef(start, skipToken(mods | Enum)) case GIVEN => - givenDef(start, mods, atSpan(in.skipToken()) { Mod.Given() }) + givenDef(start, mods, atSpan(skipToken()) { Mod.Given() }) case _ => val start = in.lastOffset syntaxErrorOrIncomplete(ExpectedStartOfTopLevelDefinition()) @@ -3855,7 +3925,7 @@ object Parsers { atSpan(start, nameStart) { val id = termIdent() if (in.token == COMMA) { - in.nextToken() + nextToken() val ids = commaSeparated(() => termIdent()) PatDef(mods1, id :: ids, TypeTree(), EmptyTree) } @@ -3877,7 +3947,7 @@ object Parsers { def caseTemplate(constr: DefDef): Template = { val parents = if (in.token == EXTENDS) { - in.nextToken() + nextToken() constrApps() } else Nil @@ -3953,7 +4023,7 @@ object Parsers { * {UsingParamClause} ExtMethods */ def extension(): ExtMethods = - val start = in.skipToken() + val start = skipToken() val tparams = typeParamClauseOpt(ParamOwner.Def) val leadParamss = ListBuffer[List[ValDef]]() def numLeadParams = leadParamss.map(_.length).sum @@ -3965,7 +4035,7 @@ object Parsers { leadParamss ++= termParamClauses(givenOnly = true, numLeadParams = numLeadParams) if in.isColon then syntaxError(em"no `:` expected here") - in.nextToken() + nextToken() val methods: List[Tree] = if in.token == EXPORT then exportClause() @@ -4029,7 
+4099,7 @@ object Parsers { val ts = val tok = in.token if (tok == WITH || tok == COMMA) && tok != exclude then - in.nextToken() + nextToken() constrApps(exclude = if tok == WITH then COMMA else WITH) else Nil t :: ts @@ -4041,7 +4111,7 @@ object Parsers { val la = in.lookahead la.isAfterLineEnd || la.token == LBRACE if in.token == WITH && !isTemplateStart then - in.nextToken() + nextToken() constrApp() :: withConstrApps() else Nil @@ -4051,7 +4121,7 @@ object Parsers { def template(constr: DefDef, isEnum: Boolean = false): Template = { val parents = if (in.token == EXTENDS) { - in.nextToken() + nextToken() if (in.token == LBRACE || in.token == COLONeol) { report.errorOrMigrationWarning( em"`extends` must be followed by at least one parent", @@ -4064,7 +4134,7 @@ object Parsers { newLinesOptWhenFollowedBy(nme.derives) val derived = if (isIdent(nme.derives)) { - in.nextToken() + nextToken() commaSeparated(() => convertToTypeId(qualId())) } else Nil @@ -4103,10 +4173,10 @@ object Parsers { Template(constr, parents, derived, self, stats) def templateBody(parents: List[Tree], rewriteWithColon: Boolean = true): (ValDef, List[Tree]) = - val r = inDefScopeBraces(templateStatSeq(), rewriteWithColon) + val r = inDefScopeBraces(templateStatSeq(), rewriteWithColon = rewriteWithColon) if in.token == WITH && parents.isEmpty then syntaxError(EarlyDefinitionsNotSupported()) - in.nextToken() + nextToken() template(emptyConstructor) r @@ -4146,9 +4216,9 @@ object Parsers { while var empty = false if (in.token == PACKAGE) { - val start = in.skipToken() + val start = skipToken() if (in.token == OBJECT) { - in.nextToken() + nextToken() stats += objectDef(start, Modifiers(Package)) } else stats += packaging(start) @@ -4179,19 +4249,19 @@ object Parsers { atSpan(in.offset) { val selfName = if in.token == THIS then - in.nextToken() + nextToken() nme.WILDCARD else ident() val selfTpt = if in.isColon then - in.nextToken() + nextToken() infixType() else if selfName == nme.WILDCARD then 
accept(COLONfollow) TypeTree() if in.token == ARROW then in.token = SELFARROW // suppresses INDENT insertion after `=>` - in.nextToken() + nextToken() else syntaxError(em"`=>` expected after self type") makeSelfDef(selfName, selfTpt) @@ -4211,23 +4281,28 @@ object Parsers { */ def templateStatSeq(): (ValDef, List[Tree]) = checkNoEscapingPlaceholders { val stats = new ListBuffer[Tree] + val startsAfterLineEnd = in.isAfterLineEnd val self = selfType() - while - var empty = false - if (in.token == IMPORT) - stats ++= importClause() - else if (in.token == EXPORT) - stats ++= exportClause() - else if isIdent(nme.extension) && followingIsExtension() then - stats += extension() - else if (isDefIntro(modifierTokensOrCase)) - stats +++= defOrDcl(in.offset, defAnnotsMods(modifierTokens)) - else if (isExprIntro) - stats += expr1() - else - empty = true - statSepOrEnd(stats, noPrevStat = empty) - do () + def loop = + while + var empty = false + if (in.token == IMPORT) + stats ++= importClause() + else if (in.token == EXPORT) + stats ++= exportClause() + else if isIdent(nme.extension) && followingIsExtension() then + stats += extension() + else if (isDefIntro(modifierTokensOrCase)) + stats +++= defOrDcl(in.offset, defAnnotsMods(modifierTokens)) + else if (isExprIntro) + stats += expr1(inStatSeq = true) + else + empty = true + statSepOrEnd(stats, noPrevStat = empty) + do () + if self != null && !startsAfterLineEnd then + indentedRegionAfterArrow(loop) + else loop (self, if stats.isEmpty then List(EmptyTree) else stats.toList) } @@ -4297,7 +4372,7 @@ object Parsers { if (in.token == IMPORT) stats ++= importClause() else if (isExprIntro) - stats += expr(Location.InBlock) + stats += expr(Location.InBlock, inStatSeq = true) else if in.token == IMPLICIT && !in.inModifierPosition() then stats += closure(in.offset, Location.InBlock, modifiers(BitSet(IMPLICIT))) else if isIdent(nme.extension) && followingIsExtension() then @@ -4316,12 +4391,12 @@ object Parsers { def compilationUnit(): 
Tree = checkNoEscapingPlaceholders { def topstats(): List[Tree] = { val ts = new ListBuffer[Tree] - while (in.token == SEMI) in.nextToken() + while (in.token == SEMI) nextToken() val start = in.offset if (in.token == PACKAGE) { - in.nextToken() + nextToken() if (in.token == OBJECT) { - in.nextToken() + nextToken() ts += objectDef(start, Modifiers(Package)) if (in.token != EOF) { statSepOrEnd(ts, what = "toplevel definition") @@ -4366,7 +4441,7 @@ object Parsers { def skipBracesHook(): Option[Tree] = if (in.token == XMLSTART) Some(xmlLiteral()) else None - override def blockExpr(): Tree = { + override def blockExpr(inStatSeq: Boolean): Tree = { skipBraces() EmptyTree } diff --git a/compiler/src/dotty/tools/dotc/parsing/Scanners.scala b/compiler/src/dotty/tools/dotc/parsing/Scanners.scala index e29b858e0978..dc4928439112 100644 --- a/compiler/src/dotty/tools/dotc/parsing/Scanners.scala +++ b/compiler/src/dotty/tools/dotc/parsing/Scanners.scala @@ -60,7 +60,7 @@ object Scanners { /** the base of a number */ var base: Int = 0 - def copyFrom(td: TokenData): Unit = { + def copyFrom(td: TokenData): this.type = { this.token = td.token this.offset = td.offset this.lastOffset = td.lastOffset @@ -68,8 +68,11 @@ object Scanners { this.name = td.name this.strVal = td.strVal this.base = td.base + this } + def saveCopy: TokenData = newTokenData.copyFrom(this) + def isNewLine = token == NEWLINE || token == NEWLINES def isStatSep = isNewLine || token == SEMI def isIdent = token == IDENTIFIER || token == BACKQUOTED_IDENT @@ -86,12 +89,14 @@ object Scanners { def isOperator = token == BACKQUOTED_IDENT - || token == IDENTIFIER && isOperatorPart(name(name.length - 1)) + || token == IDENTIFIER && isOperatorPart(name.last) def isArrow = token == ARROW || token == CTXARROW } + def newTokenData: TokenData = new TokenData {} + abstract class ScannerCommon(source: SourceFile)(using Context) extends CharArrayReader with TokenData { val buf: Array[Char] = source.content def nextToken(): Unit 
@@ -264,11 +269,10 @@ object Scanners { if (idx >= 0 && idx <= lastKeywordStart) handleMigration(kwArray(idx)) else IDENTIFIER - def newTokenData: TokenData = new TokenData {} - /** We need one token lookahead and one token history */ val next = newTokenData + val last = newTokenData private val prev = newTokenData /** The current region. This is initially an Indented region with zero indentation width. */ @@ -385,6 +389,7 @@ object Scanners { /** Produce next token, filling TokenData fields of Scanner. */ def nextToken(): Unit = + last.copyFrom(this) val lastToken = token val lastName = name adjustSepRegions(lastToken) @@ -433,7 +438,7 @@ object Scanners { // in backticks and is a binary operator. Hence, `x` is not classified as a // leading infix operator. def assumeStartsExpr(lexeme: TokenData) = - (canStartExprTokens.contains(lexeme.token) || lexeme.token == COLONeol) + (canStartExprTokens.contains(lexeme.token) || lexeme.token == COLONfollow) && (!lexeme.isOperator || nme.raw.isUnary(lexeme.name)) val lookahead = LookaheadScanner() lookahead.allowLeadingInfixOperators = false @@ -483,7 +488,7 @@ object Scanners { if (nextChar == ch) recur(idx - 1, ch, n + 1, k) else { - val k1: IndentWidth => IndentWidth = if (n == 0) k else Conc(_, Run(ch, n)) + val k1: IndentWidth => IndentWidth = if (n == 0) k else iw => k(Conc(iw, Run(ch, n))) recur(idx - 1, nextChar, 1, k1) } else recur(idx - 1, ' ', 0, identity) @@ -523,7 +528,7 @@ object Scanners { * * The following tokens can start an indentation region: * - * : = => <- if then else while do try catch + * : = => <- if then else while do try catch * finally for yield match throw return with * * Inserting an INDENT starts a new indentation region with the indentation of the current @@ -638,7 +643,8 @@ object Scanners { currentRegion.knownWidth = nextWidth else if (lastWidth != nextWidth) val lw = lastWidth - errorButContinue(spaceTabMismatchMsg(lw, nextWidth)) + val msg = spaceTabMismatchMsg(lw, nextWidth) + if 
rewriteToIndent then report.warning(msg) else errorButContinue(msg) if token != OUTDENT then handleNewIndentWidth(currentRegion, _.otherIndentWidths += nextWidth) if next.token == EMPTY then @@ -1266,6 +1272,7 @@ object Scanners { putChar(ch) ; nextRawChar() loopRest() else + next.lineOffset = if next.lastOffset < lineStartOffset then lineStartOffset else -1 finishNamedToken(IDENTIFIER, target = next) end loopRest setStrVal() @@ -1312,10 +1319,10 @@ object Scanners { } end getStringPart - private def fetchStringPart(multiLine: Boolean) = { + private def fetchStringPart(multiLine: Boolean) = offset = charOffset - 1 + lineOffset = if lastOffset < lineStartOffset then lineStartOffset else -1 getStringPart(multiLine) - } private def isTripleQuote(): Boolean = if (ch == '"') { @@ -1657,21 +1664,31 @@ object Scanners { case Run(ch: Char, n: Int) case Conc(l: IndentWidth, r: Run) - def <= (that: IndentWidth): Boolean = this match { - case Run(ch1, n1) => - that match { - case Run(ch2, n2) => n1 <= n2 && (ch1 == ch2 || n1 == 0) - case Conc(l, r) => this <= l - } - case Conc(l1, r1) => - that match { - case Conc(l2, r2) => l1 == l2 && r1 <= r2 - case _ => false - } - } + def <= (that: IndentWidth): Boolean = (this, that) match + case (Run(ch1, n1), Run(ch2, n2)) => n1 <= n2 && (ch1 == ch2 || n1 == 0) + case (Conc(l1, r1), Conc(l2, r2)) => (l1 == l2 && r1 <= r2) || this <= l2 + case (_, Conc(l2, _)) => this <= l2 + case _ => false def < (that: IndentWidth): Boolean = this <= that && !(that <= this) + def >= (that: IndentWidth): Boolean = that <= this + + def >(that: IndentWidth): Boolean = that < this + + def size: Int = this match + case Run(_, n) => n + case Conc(l, r) => l.size + r.n + + /** Add one level of indentation (one tab or two spaces depending on the last char) */ + def increment: IndentWidth = + def incRun(ch: Char, n: Int): Run = ch match + case ' ' => IndentWidth.Run(' ', n + 2) + case ch => IndentWidth.Run(ch, n + 1) + this match + case Run(ch, n) => 
incRun(ch, n) + case Conc(l, Run(ch, n)) => Conc(l, incRun(ch, n)) + /** Does `this` differ from `that` by not more than a single space? */ def isClose(that: IndentWidth): Boolean = this match case Run(ch1, n1) => diff --git a/compiler/src/dotty/tools/dotc/parsing/xml/MarkupParsers.scala b/compiler/src/dotty/tools/dotc/parsing/xml/MarkupParsers.scala index b3f41fab9eaa..34a179c1be01 100644 --- a/compiler/src/dotty/tools/dotc/parsing/xml/MarkupParsers.scala +++ b/compiler/src/dotty/tools/dotc/parsing/xml/MarkupParsers.scala @@ -324,8 +324,7 @@ object MarkupParsers { /** Some try/catch/finally logic used by xLiteral and xLiteralPattern. */ inline private def xLiteralCommon(f: () => Tree, ifTruncated: String => Unit): Tree = { assert(parser.in.token == Tokens.XMLSTART) - val saved = parser.in.newTokenData - saved.copyFrom(parser.in) + val saved = parser.in.saveCopy var output: Tree = null.asInstanceOf[Tree] try output = f() catch { @@ -404,7 +403,7 @@ object MarkupParsers { def escapeToScala[A](op: => A, kind: String): A = { xEmbeddedBlock = false val res = saving(parser.in.currentRegion, parser.in.currentRegion = _) { - val lbrace = parser.in.newTokenData + val lbrace = Scanners.newTokenData lbrace.token = LBRACE lbrace.offset = parser.in.charOffset - 1 lbrace.lastOffset = parser.in.lastOffset diff --git a/compiler/src/dotty/tools/dotc/rewrites/Rewrites.scala b/compiler/src/dotty/tools/dotc/rewrites/Rewrites.scala index f2dfac88d464..45cc3c4ccfe0 100644 --- a/compiler/src/dotty/tools/dotc/rewrites/Rewrites.scala +++ b/compiler/src/dotty/tools/dotc/rewrites/Rewrites.scala @@ -25,9 +25,13 @@ object Rewrites { def addPatch(span: Span, replacement: String): Unit = pbuf += Patch(span, replacement) + def patchOver(span: Span, replacement: String): Unit = + pbuf.indices.reverse.find(i => span.contains(pbuf(i).span)).foreach(pbuf.remove) + pbuf += Patch(span, replacement) + def apply(cs: Array[Char]): Array[Char] = { val delta = pbuf.map(_.delta).sum - val patches = 
pbuf.toList.sortBy(_.span.start) + val patches = pbuf.toList.sortBy(p => (p.span.start, p.span.end)) if (patches.nonEmpty) patches.reduceLeft {(p1, p2) => assert(p1.span.end <= p2.span.start, s"overlapping patches in $source: $p1 and $p2") @@ -71,6 +75,14 @@ object Rewrites { .addPatch(span, replacement) ) + /** Record a patch that replaces the first patch that it contains */ + def patchOver(source: SourceFile, span: Span, replacement: String)(using Context): Unit = + if ctx.reporter != Reporter.NoReporter // NoReporter is used for syntax highlighting + then ctx.settings.rewrite.value.foreach(_.patched + .getOrElseUpdate(source, new Patches(source)) + .patchOver(span, replacement) + ) + /** Patch position in `ctx.compilationUnit.source`. */ def patch(span: Span, replacement: String)(using Context): Unit = patch(ctx.compilationUnit.source, span, replacement) diff --git a/compiler/test/dotty/tools/dotc/CompilationTests.scala b/compiler/test/dotty/tools/dotc/CompilationTests.scala index 4e86a3b83383..7a36de330723 100644 --- a/compiler/test/dotty/tools/dotc/CompilationTests.scala +++ b/compiler/test/dotty/tools/dotc/CompilationTests.scala @@ -82,9 +82,14 @@ class CompilationTests { compileFile("tests/rewrites/rewrites3x.scala", defaultOptions.and("-rewrite", "-source", "future-migration")), compileFile("tests/rewrites/filtering-fors.scala", defaultOptions.and("-rewrite", "-source", "3.2-migration")), compileFile("tests/rewrites/refutable-pattern-bindings.scala", defaultOptions.and("-rewrite", "-source", "3.2-migration")), - compileFile("tests/rewrites/i8982.scala", defaultOptions.and("-indent", "-rewrite")), - compileFile("tests/rewrites/i9632.scala", defaultOptions.and("-indent", "-rewrite")), - compileFile("tests/rewrites/i11895.scala", defaultOptions.and("-indent", "-rewrite")), + compileFile("tests/rewrites/i8982.scala", indentRewrite), + compileFile("tests/rewrites/i9632.scala", indentRewrite), + compileFile("tests/rewrites/i11895.scala", indentRewrite), + 
compileFile("tests/rewrites/indent-rewrite.scala", indentRewrite), + compileFile("tests/rewrites/indent-comments.scala", indentRewrite), + compileFile("tests/rewrites/indent-mix-tab-space.scala", indentRewrite), + compileFile("tests/rewrites/indent-3-spaces.scala", indentRewrite), + compileFile("tests/rewrites/indent-mix-brace.scala", indentRewrite), compileFile("tests/rewrites/i12340.scala", unindentOptions.and("-rewrite")), compileFile("tests/rewrites/i17187.scala", unindentOptions.and("-rewrite")), compileFile("tests/rewrites/i17399.scala", unindentOptions.and("-rewrite")), diff --git a/compiler/test/dotty/tools/vulpix/TestConfiguration.scala b/compiler/test/dotty/tools/vulpix/TestConfiguration.scala index 5d2992b50a09..170454ac3347 100644 --- a/compiler/test/dotty/tools/vulpix/TestConfiguration.scala +++ b/compiler/test/dotty/tools/vulpix/TestConfiguration.scala @@ -65,6 +65,7 @@ object TestConfiguration { val commonOptions = Array("-indent") ++ checkOptions ++ noCheckOptions ++ yCheckOptions val defaultOptions = TestFlags(basicClasspath, commonOptions) + val indentRewrite = defaultOptions.and("-rewrite") val unindentOptions = TestFlags(basicClasspath, Array("-no-indent") ++ checkOptions ++ noCheckOptions ++ yCheckOptions) val withCompilerOptions = defaultOptions.withClasspath(withCompilerClasspath).withRunClasspath(withCompilerClasspath) diff --git a/tests/pos/indent-colons.scala b/tests/pos/indent-colons.scala index eb3cbc3617ea..a48d0ffa010f 100644 --- a/tests/pos/indent-colons.scala +++ b/tests/pos/indent-colons.scala @@ -152,6 +152,11 @@ object Test23: val _ = 1 `+`: // ok x + // leading infix op + val _ = 1 + `+` : + x + val r = 1 to: 100 diff --git a/tests/rewrites/indent-3-spaces.check b/tests/rewrites/indent-3-spaces.check new file mode 100644 index 000000000000..a9dd85872ed4 --- /dev/null +++ b/tests/rewrites/indent-3-spaces.check @@ -0,0 +1,21 @@ +// Rewrite to indent, keeping 3 spaces as indentation + +def m1 = + def m2 = + "" + + "" + + "" + m2 + 
+def m4 = + def m5 = + def m6 = + val x = "" + x + .apply(0) + .toString + m6 + .toString + m5 + + m5 + .toString diff --git a/tests/rewrites/indent-3-spaces.scala b/tests/rewrites/indent-3-spaces.scala new file mode 100644 index 000000000000..9df6e253f074 --- /dev/null +++ b/tests/rewrites/indent-3-spaces.scala @@ -0,0 +1,26 @@ +// Rewrite to indent, keeping 3 spaces as indentation + +def m1 = { + def m2 = { + "" + + "" + + "" + } + m2 +} + +def m4 = { + def m5 = { + def m6 = { + val x = "" + x + .apply(0) + .toString + } + m6 + .toString + } + m5 + + m5 + .toString +} diff --git a/tests/rewrites/indent-comments.check b/tests/rewrites/indent-comments.check new file mode 100644 index 000000000000..38290f23e8fa --- /dev/null +++ b/tests/rewrites/indent-comments.check @@ -0,0 +1,25 @@ +// Rewriting to indent should preserve comments +class A: /* 1 */ /* 2 */ + def m1(b: Boolean) = /* 3 */ /* 4 */ + val x = if (b) + /* 5 */ + "true" + /* 6 */ + else + /* 7 */ + "false" + /* 8 */ + /* 9 */ x.toBoolean + /* 10 */ /* 11 */ + /* 12 */def m2 = // 12 + m1:// 14 + /* 15 */// 16 + true + /* 17 */// 18 +// because of the missing indent before { +// the scanner inserts a new line between || and { +// cannot rewrite to indentation without messing the comments up + true ||// 19 + /* 20 */{ + false + }// 21 diff --git a/tests/rewrites/indent-comments.scala b/tests/rewrites/indent-comments.scala new file mode 100644 index 000000000000..87bc8bda33d6 --- /dev/null +++ b/tests/rewrites/indent-comments.scala @@ -0,0 +1,27 @@ +// Rewriting to indent should preserve comments +class A /* 1 */ { /* 2 */ + def m1(b: Boolean) = /* 3 */ { /* 4 */ + val x = if (b) + /* 5 */ { + "true" + } /* 6 */ + else + { /* 7 */ + "false" + /* 8 */ } +/* 9 */ x.toBoolean + /* 10 */ } /* 11 */ +/* 12 */def m2 = {// 12 +m1// 14 + /* 15 */{// 16 +true +/* 17 */}// 18 +// because of the missing indent before { +// the scanner inserts a new line between || and { +// cannot rewrite to indentation without messing 
the comments up +true ||// 19 +/* 20 */{ + false +}// 21 +} +} diff --git a/tests/rewrites/indent-mix-brace.check b/tests/rewrites/indent-mix-brace.check new file mode 100644 index 000000000000..eb4752e1cb2b --- /dev/null +++ b/tests/rewrites/indent-mix-brace.check @@ -0,0 +1,17 @@ +// A mix of nested in-brace regions and indented regions + +class A: + def m1 = + "" + + def m2 = + def m3 = + val x = "" + x + m3 + +class B: + def foo = + def bar = + "" + bar diff --git a/tests/rewrites/indent-mix-brace.scala b/tests/rewrites/indent-mix-brace.scala new file mode 100644 index 000000000000..944537fc341f --- /dev/null +++ b/tests/rewrites/indent-mix-brace.scala @@ -0,0 +1,21 @@ +// A mix of nested in-brace regions and indented regions + +class A: + def m1 = { + "" + } + + def m2 = { +def m3 = + val x = "" + x +m3 + } + +class B { + def foo = + def bar = { + "" + } + bar +} diff --git a/tests/rewrites/indent-mix-tab-space.check b/tests/rewrites/indent-mix-tab-space.check new file mode 100644 index 000000000000..4f25839ccfda --- /dev/null +++ b/tests/rewrites/indent-mix-tab-space.check @@ -0,0 +1,22 @@ +// Contains an ugly but valid mix of spaces and tabs +// Rewrite to significant indentation syntax + +def m1 = + def m2 = + "" + + "" + + "" + m2 + +def m4 = + def m5 = + def m6 = + val x = "" + x + .apply(0) + .toString + m6 + .toString + m5 + + m5 + .toString diff --git a/tests/rewrites/indent-mix-tab-space.scala b/tests/rewrites/indent-mix-tab-space.scala new file mode 100644 index 000000000000..4a77fd1cbde6 --- /dev/null +++ b/tests/rewrites/indent-mix-tab-space.scala @@ -0,0 +1,27 @@ +// Contains an ugly but valid mix of spaces and tabs +// Rewrite to significant indentation syntax + +def m1 = { + def m2 = { + "" + + "" + + "" + } + m2 +} + +def m4 = { + def m5 = { + def m6 = { + val x = "" + x + .apply(0) + .toString + } + m6 + .toString + } + m5 + + m5 + .toString +} diff --git a/tests/rewrites/indent-rewrite.check b/tests/rewrites/indent-rewrite.check new file mode 
100644 index 000000000000..5d21af14cc85 --- /dev/null +++ b/tests/rewrites/indent-rewrite.check @@ -0,0 +1,233 @@ +// A collection of patterns found when rewriting the community build to indent + +trait C1: + + class CC1 +// do not remove braces if empty region + class CC2 { + + } +// do not remove braces if open brace is not followed by new line + def m1(x: Int) = + { x + .toString + } +// add indent to pass an argument (fewer braces) + def m2: String = + m1: + 5 +// indent inner method + def m3: Int = + def seq = + Seq( + "1", + "2" + ) + seq + (1) + .toInt +// indent refinement + def m4: Any: + def foo: String + = + new: + def foo: String = + """ +Hello, World! +""" +// indent end marker + end m4 + +// fix off-by-one indentation + val x = "" + + def m5(x: String): String = + def inner: Boolean = + true + x + + // unindent properly when needed + def m6(xs: Seq[String]): String = + xs + .map: + x => x + .filter: + x => x.size > 0 + println("foo") + + def foo: String = + "" + foo + +// do not remove braces if closing braces not followed by new line + def m7: String = { + val x = "Hi" + x + }; def m8(x: String): String = + s"""Bye $x ${ + x + } +do not indent in a multiline string""" + def m9 = + val foo = "" + val x = Seq( + s"${foo}", + "" + ) + +// do not remove braces after closing brace + def m10(x: Int)(y: String) = y * x + m10 { 5 } { + "foo" + } + + // preserve indent of chained calls + def m11(xs: Seq[String]) = + xs + .filter: + _ => true + xs + .map { x => + val y = + if (x == "") "empty" + else x.size.toString + val z = x + y + z + } + .map { x => xs }.flatMap { xs => xs.map { x => + val y = + if (x == "") "empty" + else x.size.toString + val z = x + y + z + }} + .map: + x => val y = + if (x == "") "empty" + else x.size.toString + val z = x + y + z + .map: + x => + val y = + if (x == "") "empty" + else x.size.toString + val z = x + y + z + + // do not remove braces inside (...) or [...] 
+ // remove braces after => + def m12(xs: List[Int]) = + println( + xs.size match { + case 1 => + xs match + case 1 :: Nil => "1" + case _ => s"${xs.head} :: Nil" + case _ => { + "xs" + } + } + ) + println( + if (xs.size > 0) { + "foo" + } else { + "bar" + } + ) + xs.map( + x => { + x + } + ).map: + x => { + x + } + import reflect.Selectable.reflectiveSelectable + def m13(xs: List[ + Any { + def foo: String + } + ]) = + xs.map(x => x.foo) + + // preserve indentation style before 'case' + // but fix indentation inside 'case' + def m14(o: Option[String]) = + o match + case Some(x) => x + case None => "" + + o match + case Some(x) => x + case None => "" + + o match + case None => + "" + case Some(x) => + x + def m15(xs: List[Int]): String = + xs match + case _ :: tail => { + if tail.size == 0 then + println("log") + } + "foo" + case Nil => + "bar" + + // add backticks around operator + object `*:`: + def foo = ??? + def m16 = + val x = 5 * { + 2 + } == 10 `||`: + false + x `&&`: + true + + // leading infix operator + def m17 = + true + && { + false + } + + // ident ending with '_' + def m_(x: String) = ??? 
+ m_ : + "foo" + + // do not remove braces in sequence of blocks + def m18(using ctx: String) = println(ctx) + { + given String = "foo" + m18 + } + { + given String = "bar" + m18 + } + def m19(x: String) = + { + given String = "foo" + m18 + } + { + given String = "bar" + m18 + } + +// indent template after self type +class C2 { self => + val x = "" +} +trait C3: + self => + val x = "" +case class C4(): + self => + val y = "" diff --git a/tests/rewrites/indent-rewrite.scala b/tests/rewrites/indent-rewrite.scala new file mode 100644 index 000000000000..c3d3a6c5b66b --- /dev/null +++ b/tests/rewrites/indent-rewrite.scala @@ -0,0 +1,265 @@ +// A collection of patterns found when rewriting the community build to indent + +trait C1 { + + class CC1 +// do not remove braces if empty region +class CC2 { + +} +// do not remove braces if open brace is not followed by new line +def m1(x: Int) = +{ x +.toString + } +// add indent to pass an argument (fewer braces) +def m2: String = { +m1 { +5 +} +} +// indent inner method + def m3: Int = { +def seq = { +Seq( +"1", +"2" +) +} +seq +(1) +.toInt +} +// indent refinement +def m4: Any { +def foo: String +} += + new { + def foo: String = + """ +Hello, World! 
+""" +} +// indent end marker +end m4 + +// fix off-by-one indentation + val x = "" + + def m5(x: String): String = { + def inner: Boolean = { + true + } + x + } + + // unindent properly when needed + def m6(xs: Seq[String]): String = { + xs + .map { + x => x + } + .filter { + x => x.size > 0 + } + println("foo") + + def foo: String = + "" + foo + } + +// do not remove braces if closing braces not followed by new line +def m7: String = { +val x = "Hi" +x +}; def m8(x: String): String = { +s"""Bye $x ${ + x +} +do not indent in a multiline string""" +} + def m9 = { + val foo = "" + val x = Seq( + s"${foo}", + "" + ) + } + +// do not remove braces after closing brace +def m10(x: Int)(y: String) = y * x +m10 { 5 } { + "foo" +} + + // preserve indent of chained calls + def m11(xs: Seq[String]) = { + xs + .filter { + _ => true + } + xs + .map { x => + val y = + if (x == "") "empty" + else x.size.toString + val z = x + y + z + } + .map { x => xs }.flatMap { xs => xs.map { x => + val y = + if (x == "") "empty" + else x.size.toString + val z = x + y + z + }} + .map { + x => val y = + if (x == "") "empty" + else x.size.toString + val z = x + y + z + } + .map { + x => + val y = + if (x == "") "empty" + else x.size.toString + val z = x + y + z + } + } + + // do not remove braces inside (...) or [...] 
+ // remove braces after => + def m12(xs: List[Int]) = { + println( + xs.size match { + case 1 => + xs match { + case 1 :: Nil => "1" + case _ => s"${xs.head} :: Nil" + } + case _ => { + "xs" + } + } + ) + println( + if (xs.size > 0) { + "foo" + } else { + "bar" + } + ) + xs.map( + x => { + x + } + ).map { + x => { + x + } + } + } + import reflect.Selectable.reflectiveSelectable + def m13(xs: List[ + Any { + def foo: String + } + ]) = + xs.map(x => x.foo) + + // preserve indentation style before 'case' + // but fix indentation inside 'case' + def m14(o: Option[String]) = { + o match + case Some(x) => x + case None => "" + + o match + case Some(x) => x + case None => "" + + o match { + case None => + "" + case Some(x) => + x + } + } + def m15(xs: List[Int]): String = { + xs match { + case _ :: tail => { + if tail.size == 0 then + println("log") + } + "foo" + case Nil => + "bar" + } + } + + // add backticks around operator + object *:{ + def foo = ??? + } + def m16 = + val x = 5 * { + 2 + } == 10 || { + false + } + x `&&` { + true + } + + // leading infix operator + def m17 = + true + && { + false + } + + // ident ending with '_' + def m_(x: String) = ??? 
+ m_ { + "foo" + } + + // do not remove braces in sequence of blocks + def m18(using ctx: String) = println(ctx) + { + given String = "foo" + m18 + } + { + given String = "bar" + m18 + } + def m19(x: String) = { + { + given String = "foo" + m18 + } + { + given String = "bar" + m18 + } + } +} + +// indent template after self type +class C2 { self => +val x = "" +} +trait C3 { + self => +val x = "" +} +case class C4() { +self => + val y = "" +} From 4b74746b3a13b56e526f48578b3996c36e5defdc Mon Sep 17 00:00:00 2001 From: Adrien Piquerez Date: Tue, 30 May 2023 13:22:34 +0200 Subject: [PATCH 3/5] Make scala3-compiler indent rewrittable --- .../internal/SemanticdbInputStream.scala | 16 ++++++---------- 1 file changed, 6 insertions(+), 10 deletions(-) diff --git a/compiler/src/dotty/tools/dotc/semanticdb/internal/SemanticdbInputStream.scala b/compiler/src/dotty/tools/dotc/semanticdb/internal/SemanticdbInputStream.scala index 8aed9e5b9771..823085d55d92 100644 --- a/compiler/src/dotty/tools/dotc/semanticdb/internal/SemanticdbInputStream.scala +++ b/compiler/src/dotty/tools/dotc/semanticdb/internal/SemanticdbInputStream.scala @@ -364,17 +364,13 @@ class SemanticdbInputStream private (buffer: Array[Byte], input: InputStream) { var pos: Int = bufferPos var i: Int = 0 while (i < 10) { - { - if (buffer(({ - pos += 1; pos - 1 - })) >= 0) { - bufferPos = pos - return - } + if (buffer(({ + pos += 1; pos - 1 + })) >= 0) { + bufferPos = pos + return } - ({ - i += 1; i - 1 - }) + i += 1; i - 1 } } skipRawVarintSlowPath From 25f6977c45c7d71164a0690f073bd3ed35168448 Mon Sep 17 00:00:00 2001 From: Adrien Piquerez Date: Tue, 30 May 2023 13:26:27 +0200 Subject: [PATCH 4/5] Rewrite scala3-library to indent --- .../scala/runtime/TupledFunctions.scala | 3 +- library/src/scala/CanEqual.scala | 3 +- library/src/scala/IArray.scala | 20 +- library/src/scala/Tuple.scala | 96 +++---- library/src/scala/annotation/newMain.scala | 36 +-- library/src/scala/deriving/Mirror.scala | 15 +- 
library/src/scala/quoted/Expr.scala | 21 +- library/src/scala/quoted/ExprMap.scala | 24 +- library/src/scala/quoted/FromExpr.scala | 222 ++++++---------- library/src/scala/quoted/Quotes.scala | 63 ++--- library/src/scala/quoted/ToExpr.scala | 243 ++++++------------ library/src/scala/quoted/Type.scala | 3 +- library/src/scala/quoted/Varargs.scala | 12 +- .../src/scala/quoted/runtime/Patterns.scala | 3 +- library/src/scala/runtime/Arrays.scala | 6 +- library/src/scala/runtime/FunctionXXL.scala | 3 +- library/src/scala/runtime/LazyVals.scala | 60 ++--- library/src/scala/runtime/TupleXXL.scala | 18 +- library/src/scala/runtime/Tuples.scala | 190 +++++--------- library/src/scala/runtime/TypeBox.scala | 3 +- .../src/scala/runtime/coverage/Invoker.scala | 6 +- .../scala/runtime/stdLibPatches/Predef.scala | 3 +- .../src/scala/util/CommandLineParser.scala | 21 +- library/src/scala/util/FromDigits.scala | 36 +-- library/src/scala/util/NotGiven.scala | 6 +- .../scala/util/control/NonLocalReturns.scala | 15 +- 26 files changed, 380 insertions(+), 751 deletions(-) diff --git a/library/src-bootstrapped/scala/runtime/TupledFunctions.scala b/library/src-bootstrapped/scala/runtime/TupledFunctions.scala index c4010de1c754..758b40d6d3d0 100644 --- a/library/src-bootstrapped/scala/runtime/TupledFunctions.scala +++ b/library/src-bootstrapped/scala/runtime/TupledFunctions.scala @@ -4,7 +4,7 @@ import scala.util.TupledFunction import scala.annotation.experimental @experimental -object TupledFunctions { +object TupledFunctions: def tupledFunction0[F, G]: TupledFunction[F, G] = TupledFunction[F, G]( tupledImpl = (f: F) => ((args: EmptyTuple) => f.asInstanceOf[() => Any].apply()).asInstanceOf[G], @@ -162,4 +162,3 @@ object TupledFunctions { }.asInstanceOf[F] ) -} diff --git a/library/src/scala/CanEqual.scala b/library/src/scala/CanEqual.scala index 8c331bb21b43..b9bd961aa74a 100644 --- a/library/src/scala/CanEqual.scala +++ b/library/src/scala/CanEqual.scala @@ -11,7 +11,7 @@ sealed trait 
CanEqual[-L, -R] * CanEqual instances involving primitive types or the Null type are handled directly in * the compiler (see Implicits.synthesizedCanEqual), so they are not included here. */ -object CanEqual { +object CanEqual: /** A universal `CanEqual` instance. */ object derived extends CanEqual[Any, Any] @@ -44,4 +44,3 @@ object CanEqual { given canEqualEither[L1, R1, L2, R2]( using eqL: CanEqual[L1, L2], eqR: CanEqual[R1, R2] ): CanEqual[Either[L1, R1], Either[L2, R2]] = derived -} diff --git a/library/src/scala/IArray.scala b/library/src/scala/IArray.scala index 72124bdf8215..792e85702bcf 100644 --- a/library/src/scala/IArray.scala +++ b/library/src/scala/IArray.scala @@ -606,7 +606,7 @@ object IArray: * @param ys an array of AnyRef * @return true if corresponding elements are equal */ - def equals(xs: IArray[AnyRef], ys: IArray[AnyRef]): Boolean = + def equals(xs: IArray[AnyRef], ys: IArray[AnyRef]): Boolean = Array.equals(xs.asInstanceOf[Array[AnyRef]], ys.asInstanceOf[Array[AnyRef]]) /** Returns a decomposition of the array into a sequence. This supports @@ -624,15 +624,13 @@ object IArray: /** Apply `f` to each element for its side effects. * Note: [U] parameter needed to help scalac's type inference. */ - def foreach[U](f: T => U): Unit = { + def foreach[U](f: T => U): Unit = val len = xs.length var i = 0 - while(i < len) { + while(i < len) val x = xs(i) if(p(x)) f(x) i += 1 - } - } /** Builds a new array by applying a function to all elements of this array. * @@ -641,16 +639,14 @@ object IArray: * @return a new array resulting from applying the given function * `f` to each element of this array and collecting the results. 
*/ - def map[U: ClassTag](f: T => U): IArray[U] = { + def map[U: ClassTag](f: T => U): IArray[U] = val b = IArray.newBuilder[U] var i = 0 - while (i < xs.length) { + while (i < xs.length) val x = xs(i) if(p(x)) b += f(x) i = i + 1 - } b.result() - } /** Builds a new array by applying a function to all elements of this array * and using the elements of the resulting collections. @@ -660,16 +656,14 @@ object IArray: * @return a new array resulting from applying the given collection-valued function * `f` to each element of this array and concatenating the results. */ - def flatMap[U: ClassTag](f: T => IterableOnce[U]): IArray[U] = { + def flatMap[U: ClassTag](f: T => IterableOnce[U]): IArray[U] = val b = IArray.newBuilder[U] var i = 0 - while(i < xs.length) { + while(i < xs.length) val x = xs(i) if(p(x)) b ++= f(xs(i)) i += 1 - } b.result() - } def flatMap[BS, U](f: T => BS)(using asIterable: BS => Iterable[U], m: ClassTag[U]): IArray[U] = flatMap[U](x => asIterable(f(x))) diff --git a/library/src/scala/Tuple.scala b/library/src/scala/Tuple.scala index fa72e320b560..e44d20dff1a1 100644 --- a/library/src/scala/Tuple.scala +++ b/library/src/scala/Tuple.scala @@ -5,7 +5,7 @@ import compiletime._ import compiletime.ops.int._ /** Tuple of arbitrary arity */ -sealed trait Tuple extends Product { +sealed trait Tuple extends Product: import Tuple._ /** Create a copy of this tuple as an Array */ @@ -78,59 +78,49 @@ sealed trait Tuple extends Product { */ inline def splitAt[This >: this.type <: Tuple](n: Int): Split[This, n.type] = runtime.Tuples.splitAt(this, n).asInstanceOf[Split[This, n.type]] -} -object Tuple { +object Tuple: /** Type of a tuple with an element appended */ - type Append[X <: Tuple, Y] <: NonEmptyTuple = X match { + type Append[X <: Tuple, Y] <: NonEmptyTuple = X match case EmptyTuple => Y *: EmptyTuple case x *: xs => x *: Append[xs, Y] - } /** Type of the head of a tuple */ - type Head[X <: NonEmptyTuple] = X match { + type Head[X <: NonEmptyTuple] = X 
match case x *: _ => x - } /** Type of the initial part of the tuple without its last element */ - type Init[X <: Tuple] <: Tuple = X match { + type Init[X <: Tuple] <: Tuple = X match case _ *: EmptyTuple => EmptyTuple case x *: xs => x *: Init[xs] - } /** Type of the tail of a tuple */ - type Tail[X <: NonEmptyTuple] <: Tuple = X match { + type Tail[X <: NonEmptyTuple] <: Tuple = X match case _ *: xs => xs - } /** Type of the last element of a tuple */ - type Last[X <: Tuple] = X match { + type Last[X <: Tuple] = X match case x *: EmptyTuple => x case _ *: xs => Last[xs] - } /** Type of the concatenation of two tuples */ - type Concat[X <: Tuple, +Y <: Tuple] <: Tuple = X match { + type Concat[X <: Tuple, +Y <: Tuple] <: Tuple = X match case EmptyTuple => Y case x1 *: xs1 => x1 *: Concat[xs1, Y] - } /** Type of the element at position N in the tuple X */ - type Elem[X <: Tuple, N <: Int] = X match { + type Elem[X <: Tuple, N <: Int] = X match case x *: xs => - N match { + N match case 0 => x case S[n1] => Elem[xs, n1] - } - } /** Literal constant Int size of a tuple */ - type Size[X <: Tuple] <: Int = X match { + type Size[X <: Tuple] <: Int = X match case EmptyTuple => 0 case x *: xs => S[Size[xs]] - } /** Fold a tuple `(T1, ..., Tn)` into `F[T1, F[... 
F[Tn, Z]...]]]` */ type Fold[Tup <: Tuple, Z, F[_, _]] = Tup match @@ -138,16 +128,14 @@ object Tuple { case h *: t => F[h, Fold[t, Z, F]] /** Converts a tuple `(T1, ..., Tn)` to `(F[T1], ..., F[Tn])` */ - type Map[Tup <: Tuple, F[_ <: Union[Tup]]] <: Tuple = Tup match { + type Map[Tup <: Tuple, F[_ <: Union[Tup]]] <: Tuple = Tup match case EmptyTuple => EmptyTuple case h *: t => F[h] *: Map[t, F] - } /** Converts a tuple `(T1, ..., Tn)` to a flattened `(..F[T1], ..., ..F[Tn])` */ - type FlatMap[Tup <: Tuple, F[_ <: Union[Tup]] <: Tuple] <: Tuple = Tup match { + type FlatMap[Tup <: Tuple, F[_ <: Union[Tup]] <: Tuple] <: Tuple = Tup match case EmptyTuple => EmptyTuple case h *: t => Concat[F[h], FlatMap[t, F]] - } /** Filters out those members of the tuple for which the predicate `P` returns `false`. * A predicate `P[X]` is a type that can be either `true` or `false`. For example: @@ -160,31 +148,27 @@ object Tuple { * ``` * @syntax markdown */ - type Filter[Tup <: Tuple, P[_] <: Boolean] <: Tuple = Tup match { + type Filter[Tup <: Tuple, P[_] <: Boolean] <: Tuple = Tup match case EmptyTuple => EmptyTuple - case h *: t => P[h] match { - case true => h *: Filter[t, P] - case false => Filter[t, P] - } - } + case h *: t => P[h] match + case true => h *: Filter[t, P] + case false => Filter[t, P] /** Given two tuples, `A1 *: ... *: An * At` and `B1 *: ... *: Bn *: Bt` * where at least one of `At` or `Bt` is `EmptyTuple` or `Tuple`, * returns the tuple type `(A1, B1) *: ... *: (An, Bn) *: Ct` * where `Ct` is `EmptyTuple` if `At` or `Bt` is `EmptyTuple`, otherwise `Ct` is `Tuple`. */ - type Zip[T1 <: Tuple, T2 <: Tuple] <: Tuple = (T1, T2) match { + type Zip[T1 <: Tuple, T2 <: Tuple] <: Tuple = (T1, T2) match case (h1 *: t1, h2 *: t2) => (h1, h2) *: Zip[t1, t2] case (EmptyTuple, _) => EmptyTuple case (_, EmptyTuple) => EmptyTuple case _ => Tuple - } /** Converts a tuple `(F[T1], ..., F[Tn])` to `(T1, ... 
Tn)` */ - type InverseMap[X <: Tuple, F[_]] <: Tuple = X match { + type InverseMap[X <: Tuple, F[_]] <: Tuple = X match case F[x] *: t => x *: InverseMap[t, F] case EmptyTuple => EmptyTuple - } /** Implicit evidence. IsMappedBy[F][X] is present in the implicit scope iff * X is a tuple for which each element's type is constructed via `F`. E.g. @@ -194,22 +178,18 @@ object Tuple { type IsMappedBy[F[_]] = [X <: Tuple] =>> X =:= Map[InverseMap[X, F], F] /** Transforms a tuple `(T1, ..., Tn)` into `(T1, ..., Ti)`. */ - type Take[T <: Tuple, N <: Int] <: Tuple = N match { + type Take[T <: Tuple, N <: Int] <: Tuple = N match case 0 => EmptyTuple - case S[n1] => T match { - case EmptyTuple => EmptyTuple - case x *: xs => x *: Take[xs, n1] - } - } + case S[n1] => T match + case EmptyTuple => EmptyTuple + case x *: xs => x *: Take[xs, n1] /** Transforms a tuple `(T1, ..., Tn)` into `(Ti+1, ..., Tn)`. */ - type Drop[T <: Tuple, N <: Int] <: Tuple = N match { + type Drop[T <: Tuple, N <: Int] <: Tuple = N match case 0 => T - case S[n1] => T match { - case EmptyTuple => EmptyTuple - case x *: xs => Drop[xs, n1] - } - } + case S[n1] => T match + case EmptyTuple => EmptyTuple + case x *: xs => Drop[xs, n1] /** Splits a tuple (T1, ..., Tn) into a pair of two tuples `(T1, ..., Ti)` and * `(Ti+1, ..., Tn)`. 
@@ -231,23 +211,19 @@ object Tuple { def unapply(x: EmptyTuple): true = true /** Convert an array into a tuple of unknown arity and types */ - def fromArray[T](xs: Array[T]): Tuple = { - val xs2 = xs match { + def fromArray[T](xs: Array[T]): Tuple = + val xs2 = xs match case xs: Array[Object] => xs case xs => xs.map(_.asInstanceOf[Object]) - } runtime.Tuples.fromArray(xs2) - } /** Convert an immutable array into a tuple of unknown arity and types */ - def fromIArray[T](xs: IArray[T]): Tuple = { - val xs2: IArray[Object] = xs match { + def fromIArray[T](xs: IArray[T]): Tuple = + val xs2: IArray[Object] = xs match case xs: IArray[Object] @unchecked => xs case _ => xs.map(_.asInstanceOf[Object]) - } runtime.Tuples.fromIArray(xs2) - } /** Convert a Product into a tuple of unknown arity and types */ def fromProduct(product: Product): Tuple = @@ -260,18 +236,16 @@ object Tuple { given canEqualTuple[H1, T1 <: Tuple, H2, T2 <: Tuple]( using eqHead: CanEqual[H1, H2], eqTail: CanEqual[T1, T2] ): CanEqual[H1 *: T1, H2 *: T2] = CanEqual.derived -} /** A tuple of 0 elements */ type EmptyTuple = EmptyTuple.type /** A tuple of 0 elements. */ -case object EmptyTuple extends Tuple { +case object EmptyTuple extends Tuple: override def toString(): String = "()" -} /** Tuple of arbitrary non-zero arity */ -sealed trait NonEmptyTuple extends Tuple { +sealed trait NonEmptyTuple extends Tuple: import Tuple._ /** Get the i-th element of this tuple. 
@@ -298,11 +272,9 @@ sealed trait NonEmptyTuple extends Tuple { inline def tail[This >: this.type <: NonEmptyTuple]: Tail[This] = runtime.Tuples.tail(this).asInstanceOf[Tail[This]] -} @showAsInfix sealed abstract class *:[+H, +T <: Tuple] extends NonEmptyTuple -object *: { +object `*:`: def unapply[H, T <: Tuple](x: H *: T): (H, T) = (x.head, x.tail) -} diff --git a/library/src/scala/annotation/newMain.scala b/library/src/scala/annotation/newMain.scala index 6864b5accd6c..e7596b673530 100644 --- a/library/src/scala/annotation/newMain.scala +++ b/library/src/scala/annotation/newMain.scala @@ -94,34 +94,30 @@ final class newMain extends MainAnnotation[FromString, Any]: Help.printExplain(info) None else - preProcessArgs(info, names, args).orElse { + preProcessArgs(info, names, args).orElse: Help.printUsage(info) None - } end command - def argGetter[T](param: Parameter, arg: String, defaultArgument: Option[() => T])(using p: FromString[T]): () => T = { + def argGetter[T](param: Parameter, arg: String, defaultArgument: Option[() => T])(using p: FromString[T]): () => T = if arg.nonEmpty then parse[T](param, arg) else assert(param.hasDefault) defaultArgument.get - } - def varargGetter[T](param: Parameter, args: Seq[String])(using p: FromString[T]): () => Seq[T] = { + def varargGetter[T](param: Parameter, args: Seq[String])(using p: FromString[T]): () => Seq[T] = val getters = args.map(arg => parse[T](param, arg)) () => getters.map(_()) - } def run(execProgram: () => Any): Unit = if !hasParseErrors then execProgram() private def preProcessArgs(info: Info, names: Names, args: Seq[String]): Option[Seq[String]] = var hasError: Boolean = false - def error(msg: String): Unit = { + def error(msg: String): Unit = hasError = true println(s"Error: $msg") - } val (positionalArgs, byNameArgsMap) = val positionalArgs = List.newBuilder[String] @@ -235,7 +231,7 @@ final class newMain extends MainAnnotation[FromString, Any]: def printUsage(info: Info): Unit = def argsUsage: Seq[String] = 
for (param <- info.parameters) - yield { + yield val canonicalName = getNameWithMarker(param.name) val namesPrint = (canonicalName +: param.aliasNames).mkString("[", " | ", "]") val shortTypeName = param.typeName.split('.').last @@ -243,20 +239,17 @@ final class newMain extends MainAnnotation[FromString, Any]: else if param.hasDefault then s"[$namesPrint <$shortTypeName>]" else if param.isFlag then s"$namesPrint" else s"$namesPrint <$shortTypeName>" - } - def wrapArgumentUsages(argsUsage: Seq[String], maxLength: Int): Seq[String] = { + def wrapArgumentUsages(argsUsage: Seq[String], maxLength: Int): Seq[String] = def recurse(args: Seq[String], currentLine: String, acc: Vector[String]): Seq[String] = - (args, currentLine) match { + (args, currentLine) match case (Nil, "") => acc case (Nil, l) => (acc :+ l) case (arg +: t, "") => recurse(t, arg, acc) case (arg +: t, l) if l.length + 1 + arg.length <= maxLength => recurse(t, s"$l $arg", acc) case (arg +: t, l) => recurse(t, arg, acc :+ l) - } recurse(argsUsage, "", Vector()).toList - } val printUsageBeginning = s"Usage: ${info.name} " val argsOffset = printUsageBeginning.length @@ -268,49 +261,44 @@ final class newMain extends MainAnnotation[FromString, Any]: def printExplain(info: Info): Unit = def shiftLines(s: Seq[String], shift: Int): String = s.map(" " * shift + _).mkString("\n") - def wrapLongLine(line: String, maxLength: Int): List[String] = { + def wrapLongLine(line: String, maxLength: Int): List[String] = def recurse(s: String, acc: Vector[String]): Seq[String] = val lastSpace = s.trim.nn.lastIndexOf(' ', maxLength) if ((s.length <= maxLength) || (lastSpace < 0)) acc :+ s - else { + else val (shortLine, rest) = s.splitAt(lastSpace) recurse(rest.trim.nn, acc :+ shortLine) - } recurse(line, Vector()).toList - } println() if (info.documentation.nonEmpty) println(wrapLongLine(info.documentation, maxUsageLineLength).mkString("\n")) - if (info.parameters.nonEmpty) { + if (info.parameters.nonEmpty) val argNameShift = 
2 val argDocShift = argNameShift + 2 println("Arguments:") for param <- info.parameters do val canonicalName = getNameWithMarker(param.name) - val otherNames = param.aliasNames match { + val otherNames = param.aliasNames match case Seq() => "" case names => names.mkString("(", ", ", ") ") - } val argDoc = StringBuilder(" " * argNameShift) argDoc.append(s"$canonicalName $otherNames- ${param.typeName.split('.').last}") if param.isVarargs then argDoc.append(" (vararg)") else if param.hasDefault then argDoc.append(" (optional)") - if (param.documentation.nonEmpty) { + if (param.documentation.nonEmpty) val shiftedDoc = param.documentation.split("\n").nn .map(line => shiftLines(wrapLongLine(line.nn, maxUsageLineLength - argDocShift), argDocShift)) .mkString("\n") argDoc.append("\n").append(shiftedDoc) - } println(argDoc) - } end printExplain def shouldPrintDefaultHelp(names: Names, args: Seq[String]): Boolean = diff --git a/library/src/scala/deriving/Mirror.scala b/library/src/scala/deriving/Mirror.scala index 57453a516567..453d5759674c 100644 --- a/library/src/scala/deriving/Mirror.scala +++ b/library/src/scala/deriving/Mirror.scala @@ -2,7 +2,7 @@ package scala.deriving /** Mirrors allows typelevel access to enums, case classes and objects, and their sealed parents. */ -sealed trait Mirror { +sealed trait Mirror: /** The mirrored *-type */ type MirroredMonoType @@ -12,9 +12,8 @@ sealed trait Mirror { /** The names of the product elements */ type MirroredElemLabels <: Tuple -} -object Mirror { +object Mirror: /** The Mirror for a sum type */ trait Sum extends Mirror { self => @@ -23,28 +22,25 @@ object Mirror { } /** The Mirror for a product type */ - trait Product extends Mirror { + trait Product extends Mirror: /** Create a new instance of type `T` with elements taken from product `p`. 
*/ def fromProduct(p: scala.Product): MirroredMonoType - } - trait Singleton extends Product { + trait Singleton extends Product: type MirroredMonoType = this.type type MirroredType = this.type type MirroredElemTypes = EmptyTuple type MirroredElemLabels = EmptyTuple def fromProduct(p: scala.Product): MirroredMonoType = this - } /** A proxy for Scala 2 singletons, which do not inherit `Singleton` directly */ - class SingletonProxy(val value: AnyRef) extends Product { + class SingletonProxy(val value: AnyRef) extends Product: type MirroredMonoType = value.type type MirroredType = value.type type MirroredElemTypes = EmptyTuple type MirroredElemLabels = EmptyTuple def fromProduct(p: scala.Product): MirroredMonoType = value - } type Of[T] = Mirror { type MirroredType = T; type MirroredMonoType = T ; type MirroredElemTypes <: Tuple } type ProductOf[T] = Mirror.Product { type MirroredType = T; type MirroredMonoType = T ; type MirroredElemTypes <: Tuple } @@ -58,4 +54,3 @@ object Mirror { /** Create a new instance of type `T` with elements taken from tuple `t`. */ def fromTuple(t: p.MirroredElemTypes): T = p.fromProduct(t) -} diff --git a/library/src/scala/quoted/Expr.scala b/library/src/scala/quoted/Expr.scala index 8243e7dc4a4b..2615e9b5cd3e 100644 --- a/library/src/scala/quoted/Expr.scala +++ b/library/src/scala/quoted/Expr.scala @@ -7,7 +7,7 @@ package scala.quoted abstract class Expr[+T] private[scala] () /** Constructors for expressions */ -object Expr { +object Expr: /** `e.betaReduce` returns an expression that is functionally equivalent to `e`, * however if `e` is of the form `((y1, ..., yn) => e2)(e1, ..., en)` @@ -27,10 +27,9 @@ object Expr { * Given list of statements `s1 :: s2 :: ... :: Nil` and an expression `e` the resulting expression * will be equivalent to `'{ $s1; $s2; ...; $e }`. 
*/ - def block[T](statements: List[Expr[Any]], expr: Expr[T])(using Quotes): Expr[T] = { + def block[T](statements: List[Expr[Any]], expr: Expr[T])(using Quotes): Expr[T] = import quotes.reflect._ Block(statements.map(asTerm), expr.asTerm).asExpr.asInstanceOf[Expr[T]] - } /** Creates an expression that will construct the value `x` */ def apply[T](x: T)(using ToExpr[T])(using Quotes): Expr[T] = @@ -78,8 +77,8 @@ object Expr { * to an expression equivalent to * `'{ ($e1, $e2, ...) }` typed as an `Expr[Tuple]` */ - def ofTupleFromSeq(seq: Seq[Expr[Any]])(using Quotes): Expr[Tuple] = { - seq.size match { + def ofTupleFromSeq(seq: Seq[Expr[Any]])(using Quotes): Expr[Tuple] = + seq.size match case 0 => '{ Tuple() } case 1 => ofTupleFromSeq1(seq) case 2 => ofTupleFromSeq2(seq) @@ -104,8 +103,6 @@ object Expr { case 21 => ofTupleFromSeq21(seq) case 22 => ofTupleFromSeq22(seq) case _ => ofTupleFromSeqXXL(seq) - } - } private def ofTupleFromSeq1(seq: Seq[Expr[Any]])(using Quotes): Expr[Tuple] = seq match @@ -228,10 +225,9 @@ object Expr { } /** Given a tuple of the form `(Expr[A1], ..., Expr[An])`, outputs a tuple `Expr[(A1, ..., An)]`. */ - def ofTuple[T <: Tuple: Tuple.IsMappedBy[Expr]: Type](tup: T)(using Quotes): Expr[Tuple.InverseMap[T, Expr]] = { + def ofTuple[T <: Tuple: Tuple.IsMappedBy[Expr]: Type](tup: T)(using Quotes): Expr[Tuple.InverseMap[T, Expr]] = val elems: Seq[Expr[Any]] = tup.asInstanceOf[Product].productIterator.toSeq.asInstanceOf[Seq[Expr[Any]]] ofTupleFromSeq(elems).asExprOf[Tuple.InverseMap[T, Expr]] - } /** Find a given instance of type `T` in the current scope. 
* Return `Some` containing the expression of the implicit or @@ -239,12 +235,9 @@ object Expr { * * @tparam T type of the implicit parameter */ - def summon[T](using Type[T])(using Quotes): Option[Expr[T]] = { + def summon[T](using Type[T])(using Quotes): Option[Expr[T]] = import quotes.reflect._ - Implicits.search(TypeRepr.of[T]) match { + Implicits.search(TypeRepr.of[T]) match case iss: ImplicitSearchSuccess => Some(iss.tree.asExpr.asInstanceOf[Expr[T]]) case isf: ImplicitSearchFailure => None - } - } -} diff --git a/library/src/scala/quoted/ExprMap.scala b/library/src/scala/quoted/ExprMap.scala index 70af00b16be5..5ca06ce96444 100644 --- a/library/src/scala/quoted/ExprMap.scala +++ b/library/src/scala/quoted/ExprMap.scala @@ -6,23 +6,21 @@ trait ExprMap: def transform[T](e: Expr[T])(using Type[T])(using Quotes): Expr[T] /** Map sub-expressions an expression `e` with a type `T` */ - def transformChildren[T](e: Expr[T])(using Type[T])(using Quotes): Expr[T] = { + def transformChildren[T](e: Expr[T])(using Type[T])(using Quotes): Expr[T] = import quotes.reflect._ - final class MapChildren() { + final class MapChildren(): - def transformStatement(tree: Statement)(owner: Symbol): Statement = { - tree match { + def transformStatement(tree: Statement)(owner: Symbol): Statement = + tree match case tree: Term => transformTerm(tree, TypeRepr.of[Any])(owner) case tree: Definition => transformDefinition(tree)(owner) case tree @ (_:Import | _:Export) => tree - } - } - def transformDefinition(tree: Definition)(owner: Symbol): Definition = { - tree match { + def transformDefinition(tree: Definition)(owner: Symbol): Definition = + tree match case tree: ValDef => val owner = tree.symbol val rhs1 = tree.rhs.map(x => transformTerm(x, tree.tpt.tpe)(owner)) @@ -35,10 +33,8 @@ trait ExprMap: case tree: ClassDef => val newBody = transformStats(tree.body)(owner) ClassDef.copy(tree)(tree.name, tree.constructor, tree.parents, tree.self, newBody) - } - } - def transformTermChildren(tree: 
Term, tpe: TypeRepr)(owner: Symbol): Term = tree match { + def transformTermChildren(tree: Term, tpe: TypeRepr)(owner: Symbol): Term = tree match case Ident(name) => tree case Select(qualifier, name) => @@ -49,10 +45,9 @@ trait ExprMap: tree case tree @ Apply(fun, args) => val MethodType(_, tpes, _) = fun.tpe.widen: @unchecked - val tpes1 = tpes.map { + val tpes1 = tpes.map: case ByNameType(tpe) => tpe case tpe => tpe - } Apply.copy(tree)(transformTerm(fun, TypeRepr.of[Any])(owner), transformTerms(args, tpes1)(owner)) case TypeApply(fun, args) => TypeApply.copy(tree)(transformTerm(fun, TypeRepr.of[Any])(owner), args) @@ -94,7 +89,6 @@ trait ExprMap: Repeated.copy(tree)(transformTerms(elems, elemtpt.tpe)(owner), elemtpt) case Inlined(call, bindings, expansion) => Inlined.copy(tree)(call, transformDefinitions(bindings)(owner), transformTerm(expansion, tpe)(owner)) - } def transformTerm(tree: Term, tpe: TypeRepr)(owner: Symbol): Term = tree match @@ -148,10 +142,8 @@ trait ExprMap: def transformTypeCaseDefs(trees: List[TypeCaseDef])(owner: Symbol): List[TypeCaseDef] = trees.mapConserve(x => transformTypeCaseDef(x)(owner)) - } new MapChildren() .transformTermChildren(e.asTerm, TypeRepr.of[T])(Symbol.spliceOwner) .asExprOf[T] - } end ExprMap diff --git a/library/src/scala/quoted/FromExpr.scala b/library/src/scala/quoted/FromExpr.scala index f81afc627609..54baacff2351 100644 --- a/library/src/scala/quoted/FromExpr.scala +++ b/library/src/scala/quoted/FromExpr.scala @@ -10,7 +10,7 @@ package scala.quoted * - Calls to `new X` or `X.apply` can be lifted into its value * - Arguments of constructors can be recursively unlifted */ -trait FromExpr[T] { +trait FromExpr[T]: /** Return the value of the expression. 
* @@ -19,10 +19,9 @@ trait FromExpr[T] { */ def unapply(x: Expr[T])(using Quotes): Option[T] -} /** Default given instances of `FromExpr` */ -object FromExpr { +object FromExpr: /** Default implementation of `FromExpr[Boolean]` * - Transform `'{true}` into `Some(true)` @@ -80,10 +79,10 @@ object FromExpr { given StringFromExpr[T <: String]: FromExpr[T] = new PrimitiveFromExpr /** Lift a quoted primitive value `'{ x }` into `x` */ - private class PrimitiveFromExpr[T <: Boolean | Byte | Short | Int | Long | Float | Double | Char | String] extends FromExpr[T] { + private class PrimitiveFromExpr[T <: Boolean | Byte | Short | Int | Long | Float | Double | Char | String] extends FromExpr[T]: def unapply(expr: Expr[T])(using Quotes) = import quotes.reflect._ - def rec(tree: Term): Option[T] = tree match { + def rec(tree: Term): Option[T] = tree match case Block(stats, e) => if stats.isEmpty then rec(e) else None case Inlined(_, bindings, e) => if bindings.isEmpty then rec(e) else None case Typed(e, _) => rec(e) @@ -91,360 +90,300 @@ object FromExpr { tree.tpe.widenTermRefByName match case ConstantType(c) => Some(c.value.asInstanceOf[T]) case _ => None - } rec(expr.asTerm) - } /** Default implementation of `FromExpr[Option]` * - Transform `'{Some(x)}` into `Some(Some(x))` if `x` can be transformed using `FromExpr[T]` * - Transform `'{None}` into `Some(None)` * - Otherwise returns `None` */ - given OptionFromExpr[T](using Type[T], FromExpr[T]): FromExpr[Option[T]] with { - def unapply(x: Expr[Option[T]])(using Quotes) = x match { + given OptionFromExpr[T](using Type[T], FromExpr[T]): FromExpr[Option[T]] with + def unapply(x: Expr[Option[T]])(using Quotes) = x match case '{ Option[T](${Expr(y)}) } => Some(Option(y)) case '{ None } => Some(None) case '{ ${Expr(opt)} : Some[T] } => Some(opt) case _ => None - } - } /** Default implementation of `FromExpr[None]` * - Transform `'{None}` into `Some(None)` * - Otherwise returns `None` */ - given NoneFromExpr: FromExpr[None.type] 
with { - def unapply(x: Expr[None.type])(using Quotes) = x match { + given NoneFromExpr: FromExpr[None.type] with + def unapply(x: Expr[None.type])(using Quotes) = x match case '{ None } => Some(None) case _ => None - } - } /** Default implementation of `FromExpr[Some]` * - Transform `'{Some(x)}` into `Some(Some(x))` if `x` can be transformed using `FromExpr[T]` * - Otherwise returns `None` */ - given SomeFromExpr[T](using Type[T], FromExpr[T]): FromExpr[Some[T]] with { - def unapply(x: Expr[Some[T]])(using Quotes) = x match { + given SomeFromExpr[T](using Type[T], FromExpr[T]): FromExpr[Some[T]] with + def unapply(x: Expr[Some[T]])(using Quotes) = x match case '{ new Some[T](${Expr(y)}) } => Some(Some(y)) case '{ Some[T](${Expr(y)}) } => Some(Some(y)) case _ => None - } - } /** Default implementation of `FromExpr[StringContext]` * - Transform `'{StringContext(args: _*)}` into `Some(StringContext(args: _*))` if `args` is explicit and each one is liftable * - Otherwise returns `None` */ - given StringContextFromExpr: FromExpr[StringContext] with { - def unapply(x: Expr[StringContext])(using Quotes) = x match { + given StringContextFromExpr: FromExpr[StringContext] with + def unapply(x: Expr[StringContext])(using Quotes) = x match case '{ new StringContext(${Varargs(Exprs(args))}: _*) } => Some(StringContext(args: _*)) case '{ StringContext(${Varargs(Exprs(args))}: _*) } => Some(StringContext(args: _*)) case _ => None - } - } /** Default implementation of `FromExpr[EmptyTuple]` * - Transform `'{EmptyTuple}` into `Some(EmptyTuple)` * - Otherwise returns `None` */ - given EmptyTupleFromExpr: FromExpr[EmptyTuple.type] with { - def unapply(x: Expr[EmptyTuple.type])(using Quotes) = x match { + given EmptyTupleFromExpr: FromExpr[EmptyTuple.type] with + def unapply(x: Expr[EmptyTuple.type])(using Quotes) = x match case '{ EmptyTuple } => Some(EmptyTuple) case _ => None - } - } /** Default implementation of `FromExpr[Tuple1[...]]` * - Transform `'{Tuple1(x1)}` into 
`Some(Tuple1(x1))` if `x1` can be transformed using `FromExpr[T]` * - Otherwise returns `None` */ - given Tuple1FromExpr[T1](using Type[T1], FromExpr[T1]): FromExpr[Tuple1[T1]] with { - def unapply(x: Expr[Tuple1[T1]])(using Quotes) = x match { + given Tuple1FromExpr[T1](using Type[T1], FromExpr[T1]): FromExpr[Tuple1[T1]] with + def unapply(x: Expr[Tuple1[T1]])(using Quotes) = x match case '{ new Tuple1[T1](${Expr(y)}) } => Some(Tuple1(y)) case '{ Tuple1[T1](${Expr(y)}) } => Some(Tuple1(y)) case _ => None - } - } /** Default implementation of `FromExpr[Tuple2[...]]` * - Transform `'{Tuple2(x1, x2)}` into `Some(Tuple2(x1, x2))` if all `xi` can be transformed using `FromExpr[Ti]` * - Otherwise returns `None` */ - given Tuple2FromExpr[T1, T2](using Type[T1], Type[T2], FromExpr[T1], FromExpr[T2]): FromExpr[Tuple2[T1, T2]] with { - def unapply(x: Expr[Tuple2[T1, T2]])(using Quotes) = x match { + given Tuple2FromExpr[T1, T2](using Type[T1], Type[T2], FromExpr[T1], FromExpr[T2]): FromExpr[Tuple2[T1, T2]] with + def unapply(x: Expr[Tuple2[T1, T2]])(using Quotes) = x match case '{ new Tuple2[T1, T2](${Expr(y1)}, ${Expr(y2)}) } => Some(Tuple2(y1, y2)) case '{ Tuple2[T1, T2](${Expr(y1)}, ${Expr(y2)}) } => Some(Tuple2(y1, y2)) case '{ (${Expr(y1)}: T1) -> (${Expr(y2)}: T2) } => Some(Tuple2(y1, y2)) case _ => None - } - } /** Default implementation of `FromExpr[Tuple3[...]]` * - Transform `'{Tuple3(x1, x2, x3)}` into `Some(Tuple3(x1, x2, x3))` if all `xi` can be transformed using `FromExpr[Ti]` * - Otherwise returns `None` */ - given Tuple3FromExpr[T1, T2, T3](using Type[T1], Type[T2], Type[T3], FromExpr[T1], FromExpr[T2], FromExpr[T3]): FromExpr[Tuple3[T1, T2, T3]] with { - def unapply(x: Expr[Tuple3[T1, T2, T3]])(using Quotes) = x match { + given Tuple3FromExpr[T1, T2, T3](using Type[T1], Type[T2], Type[T3], FromExpr[T1], FromExpr[T2], FromExpr[T3]): FromExpr[Tuple3[T1, T2, T3]] with + def unapply(x: Expr[Tuple3[T1, T2, T3]])(using Quotes) = x match case '{ new Tuple3[T1, T2, 
T3](${Expr(y1)}, ${Expr(y2)}, ${Expr(y3)}) } => Some(Tuple3(y1, y2, y3)) case '{ Tuple3[T1, T2, T3](${Expr(y1)}, ${Expr(y2)}, ${Expr(y3)}) } => Some(Tuple3(y1, y2, y3)) case _ => None - } - } /** Default implementation of `FromExpr[Tuple4[...]]` * - Transform `'{Tuple4(x1, ..., x4)}` into `Some(Tuple4(x1, ..., x4))` if all `xi` can be transformed using `FromExpr[Ti]` * - Otherwise returns `None` */ - given Tuple4FromExpr[T1, T2, T3, T4](using Type[T1], Type[T2], Type[T3], Type[T4], FromExpr[T1], FromExpr[T2], FromExpr[T3], FromExpr[T4]): FromExpr[Tuple4[T1, T2, T3, T4]] with { - def unapply(x: Expr[Tuple4[T1, T2, T3, T4]])(using Quotes) = x match { + given Tuple4FromExpr[T1, T2, T3, T4](using Type[T1], Type[T2], Type[T3], Type[T4], FromExpr[T1], FromExpr[T2], FromExpr[T3], FromExpr[T4]): FromExpr[Tuple4[T1, T2, T3, T4]] with + def unapply(x: Expr[Tuple4[T1, T2, T3, T4]])(using Quotes) = x match case '{ new Tuple4[T1, T2, T3, T4](${Expr(y1)}, ${Expr(y2)}, ${Expr(y3)}, ${Expr(y4)}) } => Some(Tuple4(y1, y2, y3, y4)) case '{ Tuple4[T1, T2, T3, T4](${Expr(y1)}, ${Expr(y2)}, ${Expr(y3)}, ${Expr(y4)}) } => Some(Tuple4(y1, y2, y3, y4)) case _ => None - } - } /** Default implementation of `FromExpr[Tuple5[...]]` * - Transform `'{Tuple5(x1, ..., x5)}` into `Some(Tuple5(x1, ..., x5))` if all `xi` can be transformed using `FromExpr[Ti]` * - Otherwise returns `None` */ - given Tuple5FromExpr[T1, T2, T3, T4, T5](using Type[T1], Type[T2], Type[T3], Type[T4], Type[T5], FromExpr[T1], FromExpr[T2], FromExpr[T3], FromExpr[T4], FromExpr[T5]): FromExpr[Tuple5[T1, T2, T3, T4, T5]] with { - def unapply(x: Expr[Tuple5[T1, T2, T3, T4, T5]])(using Quotes) = x match { + given Tuple5FromExpr[T1, T2, T3, T4, T5](using Type[T1], Type[T2], Type[T3], Type[T4], Type[T5], FromExpr[T1], FromExpr[T2], FromExpr[T3], FromExpr[T4], FromExpr[T5]): FromExpr[Tuple5[T1, T2, T3, T4, T5]] with + def unapply(x: Expr[Tuple5[T1, T2, T3, T4, T5]])(using Quotes) = x match case '{ new Tuple5[T1, T2, T3, T4, 
T5](${Expr(y1)}, ${Expr(y2)}, ${Expr(y3)}, ${Expr(y4)}, ${Expr(y5)}) } => Some(Tuple5(y1, y2, y3, y4, y5)) case '{ Tuple5[T1, T2, T3, T4, T5](${Expr(y1)}, ${Expr(y2)}, ${Expr(y3)}, ${Expr(y4)}, ${Expr(y5)}) } => Some(Tuple5(y1, y2, y3, y4, y5)) case _ => None - } - } /** Default implementation of `FromExpr[Tuple6[...]]` * - Transform `'{Tuple6(x1, ..., x6)}` into `Some(Tuple6(x1, ..., x6))` if all `xi` can be transformed using `FromExpr[Ti]` * - Otherwise returns `None` */ - given Tuple6FromExpr[T1, T2, T3, T4, T5, T6](using Type[T1], Type[T2], Type[T3], Type[T4], Type[T5], Type[T6], FromExpr[T1], FromExpr[T2], FromExpr[T3], FromExpr[T4], FromExpr[T5], FromExpr[T6]): FromExpr[Tuple6[T1, T2, T3, T4, T5, T6]] with { - def unapply(x: Expr[Tuple6[T1, T2, T3, T4, T5, T6]])(using Quotes) = x match { + given Tuple6FromExpr[T1, T2, T3, T4, T5, T6](using Type[T1], Type[T2], Type[T3], Type[T4], Type[T5], Type[T6], FromExpr[T1], FromExpr[T2], FromExpr[T3], FromExpr[T4], FromExpr[T5], FromExpr[T6]): FromExpr[Tuple6[T1, T2, T3, T4, T5, T6]] with + def unapply(x: Expr[Tuple6[T1, T2, T3, T4, T5, T6]])(using Quotes) = x match case '{ new Tuple6[T1, T2, T3, T4, T5, T6](${Expr(y1)}, ${Expr(y2)}, ${Expr(y3)}, ${Expr(y4)}, ${Expr(y5)}, ${Expr(y6)}) } => Some(Tuple6(y1, y2, y3, y4, y5, y6)) case '{ Tuple6[T1, T2, T3, T4, T5, T6](${Expr(y1)}, ${Expr(y2)}, ${Expr(y3)}, ${Expr(y4)}, ${Expr(y5)}, ${Expr(y6)}) } => Some(Tuple6(y1, y2, y3, y4, y5, y6)) case _ => None - } - } /** Default implementation of `FromExpr[Tuple7[...]]` * - Transform `'{Tuple7(x1, ..., x7)}` into `Some(Tuple7(x1, ..., x7))` if all `xi` can be transformed using `FromExpr[Ti]` * - Otherwise returns `None` */ - given Tuple7FromExpr[T1, T2, T3, T4, T5, T6, T7](using Type[T1], Type[T2], Type[T3], Type[T4], Type[T5], Type[T6], Type[T7], FromExpr[T1], FromExpr[T2], FromExpr[T3], FromExpr[T4], FromExpr[T5], FromExpr[T6], FromExpr[T7]): FromExpr[Tuple7[T1, T2, T3, T4, T5, T6, T7]] with { - def unapply(x: Expr[Tuple7[T1, T2, 
T3, T4, T5, T6, T7]])(using Quotes) = x match { + given Tuple7FromExpr[T1, T2, T3, T4, T5, T6, T7](using Type[T1], Type[T2], Type[T3], Type[T4], Type[T5], Type[T6], Type[T7], FromExpr[T1], FromExpr[T2], FromExpr[T3], FromExpr[T4], FromExpr[T5], FromExpr[T6], FromExpr[T7]): FromExpr[Tuple7[T1, T2, T3, T4, T5, T6, T7]] with + def unapply(x: Expr[Tuple7[T1, T2, T3, T4, T5, T6, T7]])(using Quotes) = x match case '{ new Tuple7[T1, T2, T3, T4, T5, T6, T7](${Expr(y1)}, ${Expr(y2)}, ${Expr(y3)}, ${Expr(y4)}, ${Expr(y5)}, ${Expr(y6)}, ${Expr(y7)}) } => Some(Tuple7(y1, y2, y3, y4, y5, y6, y7)) case '{ Tuple7[T1, T2, T3, T4, T5, T6, T7](${Expr(y1)}, ${Expr(y2)}, ${Expr(y3)}, ${Expr(y4)}, ${Expr(y5)}, ${Expr(y6)}, ${Expr(y7)}) } => Some(Tuple7(y1, y2, y3, y4, y5, y6, y7)) case _ => None - } - } /** Default implementation of `FromExpr[Tuple8[...]]` * - Transform `'{Tuple8(x1, ..., x8)}` into `Some(Tuple8(x1, ..., x8))` if all `xi` can be transformed using `FromExpr[Ti]` * - Otherwise returns `None` */ - given Tuple8FromExpr[T1, T2, T3, T4, T5, T6, T7, T8](using Type[T1], Type[T2], Type[T3], Type[T4], Type[T5], Type[T6], Type[T7], Type[T8], FromExpr[T1], FromExpr[T2], FromExpr[T3], FromExpr[T4], FromExpr[T5], FromExpr[T6], FromExpr[T7], FromExpr[T8]): FromExpr[Tuple8[T1, T2, T3, T4, T5, T6, T7, T8]] with { - def unapply(x: Expr[Tuple8[T1, T2, T3, T4, T5, T6, T7, T8]])(using Quotes) = x match { + given Tuple8FromExpr[T1, T2, T3, T4, T5, T6, T7, T8](using Type[T1], Type[T2], Type[T3], Type[T4], Type[T5], Type[T6], Type[T7], Type[T8], FromExpr[T1], FromExpr[T2], FromExpr[T3], FromExpr[T4], FromExpr[T5], FromExpr[T6], FromExpr[T7], FromExpr[T8]): FromExpr[Tuple8[T1, T2, T3, T4, T5, T6, T7, T8]] with + def unapply(x: Expr[Tuple8[T1, T2, T3, T4, T5, T6, T7, T8]])(using Quotes) = x match case '{ new Tuple8[T1, T2, T3, T4, T5, T6, T7, T8](${Expr(y1)}, ${Expr(y2)}, ${Expr(y3)}, ${Expr(y4)}, ${Expr(y5)}, ${Expr(y6)}, ${Expr(y7)}, ${Expr(y8)}) } => Some(Tuple8(y1, y2, y3, y4, y5, y6, y7, 
y8)) case '{ Tuple8[T1, T2, T3, T4, T5, T6, T7, T8](${Expr(y1)}, ${Expr(y2)}, ${Expr(y3)}, ${Expr(y4)}, ${Expr(y5)}, ${Expr(y6)}, ${Expr(y7)}, ${Expr(y8)}) } => Some(Tuple8(y1, y2, y3, y4, y5, y6, y7, y8)) case _ => None - } - } /** Default implementation of `FromExpr[Tuple9[...]]` * - Transform `'{Tuple9(x1, ..., x9)}` into `Some(Tuple9(x1, ..., x9))` if all `xi` can be transformed using `FromExpr[Ti]` * - Otherwise returns `None` */ - given Tuple9FromExpr[T1, T2, T3, T4, T5, T6, T7, T8, T9](using Type[T1], Type[T2], Type[T3], Type[T4], Type[T5], Type[T6], Type[T7], Type[T8], Type[T9], FromExpr[T1], FromExpr[T2], FromExpr[T3], FromExpr[T4], FromExpr[T5], FromExpr[T6], FromExpr[T7], FromExpr[T8], FromExpr[T9]): FromExpr[Tuple9[T1, T2, T3, T4, T5, T6, T7, T8, T9]] with { - def unapply(x: Expr[Tuple9[T1, T2, T3, T4, T5, T6, T7, T8, T9]])(using Quotes) = x match { + given Tuple9FromExpr[T1, T2, T3, T4, T5, T6, T7, T8, T9](using Type[T1], Type[T2], Type[T3], Type[T4], Type[T5], Type[T6], Type[T7], Type[T8], Type[T9], FromExpr[T1], FromExpr[T2], FromExpr[T3], FromExpr[T4], FromExpr[T5], FromExpr[T6], FromExpr[T7], FromExpr[T8], FromExpr[T9]): FromExpr[Tuple9[T1, T2, T3, T4, T5, T6, T7, T8, T9]] with + def unapply(x: Expr[Tuple9[T1, T2, T3, T4, T5, T6, T7, T8, T9]])(using Quotes) = x match case '{ new Tuple9[T1, T2, T3, T4, T5, T6, T7, T8, T9](${Expr(y1)}, ${Expr(y2)}, ${Expr(y3)}, ${Expr(y4)}, ${Expr(y5)}, ${Expr(y6)}, ${Expr(y7)}, ${Expr(y8)}, ${Expr(y9)}) } => Some(Tuple9(y1, y2, y3, y4, y5, y6, y7, y8, y9)) case '{ Tuple9[T1, T2, T3, T4, T5, T6, T7, T8, T9](${Expr(y1)}, ${Expr(y2)}, ${Expr(y3)}, ${Expr(y4)}, ${Expr(y5)}, ${Expr(y6)}, ${Expr(y7)}, ${Expr(y8)}, ${Expr(y9)}) } => Some(Tuple9(y1, y2, y3, y4, y5, y6, y7, y8, y9)) case _ => None - } - } /** Default implementation of `FromExpr[Tuple10[...]]` * - Transform `'{Tuple0(x1, ..., x10)}` into `Some(Tuple0(x1, ..., x10))` if all `xi` can be transformed using `FromExpr[Ti]` * - Otherwise returns `None` */ - given 
Tuple10FromExpr[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10](using Type[T1], Type[T2], Type[T3], Type[T4], Type[T5], Type[T6], Type[T7], Type[T8], Type[T9], Type[T10], FromExpr[T1], FromExpr[T2], FromExpr[T3], FromExpr[T4], FromExpr[T5], FromExpr[T6], FromExpr[T7], FromExpr[T8], FromExpr[T9], FromExpr[T10]): FromExpr[Tuple10[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10]] with { - def unapply(x: Expr[Tuple10[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10]])(using Quotes) = x match { + given Tuple10FromExpr[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10](using Type[T1], Type[T2], Type[T3], Type[T4], Type[T5], Type[T6], Type[T7], Type[T8], Type[T9], Type[T10], FromExpr[T1], FromExpr[T2], FromExpr[T3], FromExpr[T4], FromExpr[T5], FromExpr[T6], FromExpr[T7], FromExpr[T8], FromExpr[T9], FromExpr[T10]): FromExpr[Tuple10[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10]] with + def unapply(x: Expr[Tuple10[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10]])(using Quotes) = x match case '{ new Tuple10[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10](${Expr(y1)}, ${Expr(y2)}, ${Expr(y3)}, ${Expr(y4)}, ${Expr(y5)}, ${Expr(y6)}, ${Expr(y7)}, ${Expr(y8)}, ${Expr(y9)}, ${Expr(y10)}) } => Some(Tuple10(y1, y2, y3, y4, y5, y6, y7, y8, y9, y10)) case '{ Tuple10[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10](${Expr(y1)}, ${Expr(y2)}, ${Expr(y3)}, ${Expr(y4)}, ${Expr(y5)}, ${Expr(y6)}, ${Expr(y7)}, ${Expr(y8)}, ${Expr(y9)}, ${Expr(y10)}) } => Some(Tuple10(y1, y2, y3, y4, y5, y6, y7, y8, y9, y10)) case _ => None - } - } /** Default implementation of `FromExpr[Tuple11[...]]` * - Transform `'{Tuple1(x1, ..., x11)}` into `Some(Tuple1(x1, ..., x11))` if all `xi` can be transformed using `FromExpr[Ti]` * - Otherwise returns `None` */ - given Tuple11FromExpr[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11](using Type[T1], Type[T2], Type[T3], Type[T4], Type[T5], Type[T6], Type[T7], Type[T8], Type[T9], Type[T10], Type[T11], FromExpr[T1], FromExpr[T2], FromExpr[T3], FromExpr[T4], FromExpr[T5], FromExpr[T6], FromExpr[T7], FromExpr[T8], FromExpr[T9], 
FromExpr[T10], FromExpr[T11]): FromExpr[Tuple11[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11]] with { - def unapply(x: Expr[Tuple11[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11]])(using Quotes) = x match { + given Tuple11FromExpr[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11](using Type[T1], Type[T2], Type[T3], Type[T4], Type[T5], Type[T6], Type[T7], Type[T8], Type[T9], Type[T10], Type[T11], FromExpr[T1], FromExpr[T2], FromExpr[T3], FromExpr[T4], FromExpr[T5], FromExpr[T6], FromExpr[T7], FromExpr[T8], FromExpr[T9], FromExpr[T10], FromExpr[T11]): FromExpr[Tuple11[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11]] with + def unapply(x: Expr[Tuple11[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11]])(using Quotes) = x match case '{ new Tuple11[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11](${Expr(y1)}, ${Expr(y2)}, ${Expr(y3)}, ${Expr(y4)}, ${Expr(y5)}, ${Expr(y6)}, ${Expr(y7)}, ${Expr(y8)}, ${Expr(y9)}, ${Expr(y10)}, ${Expr(y11)}) } => Some(Tuple11(y1, y2, y3, y4, y5, y6, y7, y8, y9, y10, y11)) case '{ Tuple11[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11](${Expr(y1)}, ${Expr(y2)}, ${Expr(y3)}, ${Expr(y4)}, ${Expr(y5)}, ${Expr(y6)}, ${Expr(y7)}, ${Expr(y8)}, ${Expr(y9)}, ${Expr(y10)}, ${Expr(y11)}) } => Some(Tuple11(y1, y2, y3, y4, y5, y6, y7, y8, y9, y10, y11)) case _ => None - } - } /** Default implementation of `FromExpr[Tuple12[...]]` * - Transform `'{Tuple2(x1, ..., x12)}` into `Some(Tuple2(x1, ..., x12))` if all `xi` can be transformed using `FromExpr[Ti]` * - Otherwise returns `None` */ - given Tuple12FromExpr[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12](using Type[T1], Type[T2], Type[T3], Type[T4], Type[T5], Type[T6], Type[T7], Type[T8], Type[T9], Type[T10], Type[T11], Type[T12], FromExpr[T1], FromExpr[T2], FromExpr[T3], FromExpr[T4], FromExpr[T5], FromExpr[T6], FromExpr[T7], FromExpr[T8], FromExpr[T9], FromExpr[T10], FromExpr[T11], FromExpr[T12]): FromExpr[Tuple12[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12]] with { - def unapply(x: Expr[Tuple12[T1, T2, T3, 
T4, T5, T6, T7, T8, T9, T10, T11, T12]])(using Quotes) = x match { + given Tuple12FromExpr[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12](using Type[T1], Type[T2], Type[T3], Type[T4], Type[T5], Type[T6], Type[T7], Type[T8], Type[T9], Type[T10], Type[T11], Type[T12], FromExpr[T1], FromExpr[T2], FromExpr[T3], FromExpr[T4], FromExpr[T5], FromExpr[T6], FromExpr[T7], FromExpr[T8], FromExpr[T9], FromExpr[T10], FromExpr[T11], FromExpr[T12]): FromExpr[Tuple12[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12]] with + def unapply(x: Expr[Tuple12[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12]])(using Quotes) = x match case '{ new Tuple12[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12](${Expr(y1)}, ${Expr(y2)}, ${Expr(y3)}, ${Expr(y4)}, ${Expr(y5)}, ${Expr(y6)}, ${Expr(y7)}, ${Expr(y8)}, ${Expr(y9)}, ${Expr(y10)}, ${Expr(y11)}, ${Expr(y12)}) } => Some(Tuple12(y1, y2, y3, y4, y5, y6, y7, y8, y9, y10, y11, y12)) case '{ Tuple12[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12](${Expr(y1)}, ${Expr(y2)}, ${Expr(y3)}, ${Expr(y4)}, ${Expr(y5)}, ${Expr(y6)}, ${Expr(y7)}, ${Expr(y8)}, ${Expr(y9)}, ${Expr(y10)}, ${Expr(y11)}, ${Expr(y12)}) } => Some(Tuple12(y1, y2, y3, y4, y5, y6, y7, y8, y9, y10, y11, y12)) case _ => None - } - } /** Default implementation of `FromExpr[Tuple13[...]]` * - Transform `'{Tuple3(x1, ..., x13)}` into `Some(Tuple3(x1, ..., x13))` if all `xi` can be transformed using `FromExpr[Ti]` * - Otherwise returns `None` */ - given Tuple13FromExpr[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13](using Type[T1], Type[T2], Type[T3], Type[T4], Type[T5], Type[T6], Type[T7], Type[T8], Type[T9], Type[T10], Type[T11], Type[T12], Type[T13], FromExpr[T1], FromExpr[T2], FromExpr[T3], FromExpr[T4], FromExpr[T5], FromExpr[T6], FromExpr[T7], FromExpr[T8], FromExpr[T9], FromExpr[T10], FromExpr[T11], FromExpr[T12], FromExpr[T13]): FromExpr[Tuple13[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13]] with { - def unapply(x: Expr[Tuple13[T1, T2, T3, T4, T5, T6, 
T7, T8, T9, T10, T11, T12, T13]])(using Quotes) = x match { + given Tuple13FromExpr[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13](using Type[T1], Type[T2], Type[T3], Type[T4], Type[T5], Type[T6], Type[T7], Type[T8], Type[T9], Type[T10], Type[T11], Type[T12], Type[T13], FromExpr[T1], FromExpr[T2], FromExpr[T3], FromExpr[T4], FromExpr[T5], FromExpr[T6], FromExpr[T7], FromExpr[T8], FromExpr[T9], FromExpr[T10], FromExpr[T11], FromExpr[T12], FromExpr[T13]): FromExpr[Tuple13[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13]] with + def unapply(x: Expr[Tuple13[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13]])(using Quotes) = x match case '{ new Tuple13[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13](${Expr(y1)}, ${Expr(y2)}, ${Expr(y3)}, ${Expr(y4)}, ${Expr(y5)}, ${Expr(y6)}, ${Expr(y7)}, ${Expr(y8)}, ${Expr(y9)}, ${Expr(y10)}, ${Expr(y11)}, ${Expr(y12)}, ${Expr(y13)}) } => Some(Tuple13(y1, y2, y3, y4, y5, y6, y7, y8, y9, y10, y11, y12, y13)) case '{ Tuple13[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13](${Expr(y1)}, ${Expr(y2)}, ${Expr(y3)}, ${Expr(y4)}, ${Expr(y5)}, ${Expr(y6)}, ${Expr(y7)}, ${Expr(y8)}, ${Expr(y9)}, ${Expr(y10)}, ${Expr(y11)}, ${Expr(y12)}, ${Expr(y13)}) } => Some(Tuple13(y1, y2, y3, y4, y5, y6, y7, y8, y9, y10, y11, y12, y13)) case _ => None - } - } /** Default implementation of `FromExpr[Tuple14[...]]` * - Transform `'{Tuple4(x1, ..., x14)}` into `Some(Tuple4(x1, ..., x14))` if all `xi` can be transformed using `FromExpr[Ti]` * - Otherwise returns `None` */ - given Tuple14FromExpr[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14](using Type[T1], Type[T2], Type[T3], Type[T4], Type[T5], Type[T6], Type[T7], Type[T8], Type[T9], Type[T10], Type[T11], Type[T12], Type[T13], Type[T14], FromExpr[T1], FromExpr[T2], FromExpr[T3], FromExpr[T4], FromExpr[T5], FromExpr[T6], FromExpr[T7], FromExpr[T8], FromExpr[T9], FromExpr[T10], FromExpr[T11], FromExpr[T12], FromExpr[T13], FromExpr[T14]): FromExpr[Tuple14[T1, 
T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14]] with { - def unapply(x: Expr[Tuple14[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14]])(using Quotes) = x match { + given Tuple14FromExpr[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14](using Type[T1], Type[T2], Type[T3], Type[T4], Type[T5], Type[T6], Type[T7], Type[T8], Type[T9], Type[T10], Type[T11], Type[T12], Type[T13], Type[T14], FromExpr[T1], FromExpr[T2], FromExpr[T3], FromExpr[T4], FromExpr[T5], FromExpr[T6], FromExpr[T7], FromExpr[T8], FromExpr[T9], FromExpr[T10], FromExpr[T11], FromExpr[T12], FromExpr[T13], FromExpr[T14]): FromExpr[Tuple14[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14]] with + def unapply(x: Expr[Tuple14[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14]])(using Quotes) = x match case '{ new Tuple14[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14](${Expr(y1)}, ${Expr(y2)}, ${Expr(y3)}, ${Expr(y4)}, ${Expr(y5)}, ${Expr(y6)}, ${Expr(y7)}, ${Expr(y8)}, ${Expr(y9)}, ${Expr(y10)}, ${Expr(y11)}, ${Expr(y12)}, ${Expr(y13)}, ${Expr(y14)}) } => Some(Tuple14(y1, y2, y3, y4, y5, y6, y7, y8, y9, y10, y11, y12, y13, y14)) case '{ Tuple14[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14](${Expr(y1)}, ${Expr(y2)}, ${Expr(y3)}, ${Expr(y4)}, ${Expr(y5)}, ${Expr(y6)}, ${Expr(y7)}, ${Expr(y8)}, ${Expr(y9)}, ${Expr(y10)}, ${Expr(y11)}, ${Expr(y12)}, ${Expr(y13)}, ${Expr(y14)}) } => Some(Tuple14(y1, y2, y3, y4, y5, y6, y7, y8, y9, y10, y11, y12, y13, y14)) case _ => None - } - } /** Default implementation of `FromExpr[Tuple15[...]]` * - Transform `'{Tuple5(x1, ..., x15)}` into `Some(Tuple5(x1, ..., x15))` if all `xi` can be transformed using `FromExpr[Ti]` * - Otherwise returns `None` */ - given Tuple15FromExpr[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15](using Type[T1], Type[T2], Type[T3], Type[T4], Type[T5], Type[T6], Type[T7], Type[T8], Type[T9], Type[T10], Type[T11], Type[T12], Type[T13], Type[T14], 
Type[T15], FromExpr[T1], FromExpr[T2], FromExpr[T3], FromExpr[T4], FromExpr[T5], FromExpr[T6], FromExpr[T7], FromExpr[T8], FromExpr[T9], FromExpr[T10], FromExpr[T11], FromExpr[T12], FromExpr[T13], FromExpr[T14], FromExpr[T15]): FromExpr[Tuple15[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15]] with { - def unapply(x: Expr[Tuple15[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15]])(using Quotes) = x match { + given Tuple15FromExpr[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15](using Type[T1], Type[T2], Type[T3], Type[T4], Type[T5], Type[T6], Type[T7], Type[T8], Type[T9], Type[T10], Type[T11], Type[T12], Type[T13], Type[T14], Type[T15], FromExpr[T1], FromExpr[T2], FromExpr[T3], FromExpr[T4], FromExpr[T5], FromExpr[T6], FromExpr[T7], FromExpr[T8], FromExpr[T9], FromExpr[T10], FromExpr[T11], FromExpr[T12], FromExpr[T13], FromExpr[T14], FromExpr[T15]): FromExpr[Tuple15[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15]] with + def unapply(x: Expr[Tuple15[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15]])(using Quotes) = x match case '{ new Tuple15[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15](${Expr(y1)}, ${Expr(y2)}, ${Expr(y3)}, ${Expr(y4)}, ${Expr(y5)}, ${Expr(y6)}, ${Expr(y7)}, ${Expr(y8)}, ${Expr(y9)}, ${Expr(y10)}, ${Expr(y11)}, ${Expr(y12)}, ${Expr(y13)}, ${Expr(y14)}, ${Expr(y15)}) } => Some(Tuple15(y1, y2, y3, y4, y5, y6, y7, y8, y9, y10, y11, y12, y13, y14, y15)) case '{ Tuple15[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15](${Expr(y1)}, ${Expr(y2)}, ${Expr(y3)}, ${Expr(y4)}, ${Expr(y5)}, ${Expr(y6)}, ${Expr(y7)}, ${Expr(y8)}, ${Expr(y9)}, ${Expr(y10)}, ${Expr(y11)}, ${Expr(y12)}, ${Expr(y13)}, ${Expr(y14)}, ${Expr(y15)}) } => Some(Tuple15(y1, y2, y3, y4, y5, y6, y7, y8, y9, y10, y11, y12, y13, y14, y15)) case _ => None - } - } /** Default implementation of `FromExpr[Tuple16[...]]` * - Transform `'{Tuple6(x1, ..., x16)}` into 
`Some(Tuple6(x1, ..., x16))` if all `xi` can be transformed using `FromExpr[Ti]` * - Otherwise returns `None` */ - given Tuple16FromExpr[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16](using Type[T1], Type[T2], Type[T3], Type[T4], Type[T5], Type[T6], Type[T7], Type[T8], Type[T9], Type[T10], Type[T11], Type[T12], Type[T13], Type[T14], Type[T15], Type[T16], FromExpr[T1], FromExpr[T2], FromExpr[T3], FromExpr[T4], FromExpr[T5], FromExpr[T6], FromExpr[T7], FromExpr[T8], FromExpr[T9], FromExpr[T10], FromExpr[T11], FromExpr[T12], FromExpr[T13], FromExpr[T14], FromExpr[T15], FromExpr[T16]): FromExpr[Tuple16[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16]] with { - def unapply(x: Expr[Tuple16[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16]])(using Quotes) = x match { + given Tuple16FromExpr[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16](using Type[T1], Type[T2], Type[T3], Type[T4], Type[T5], Type[T6], Type[T7], Type[T8], Type[T9], Type[T10], Type[T11], Type[T12], Type[T13], Type[T14], Type[T15], Type[T16], FromExpr[T1], FromExpr[T2], FromExpr[T3], FromExpr[T4], FromExpr[T5], FromExpr[T6], FromExpr[T7], FromExpr[T8], FromExpr[T9], FromExpr[T10], FromExpr[T11], FromExpr[T12], FromExpr[T13], FromExpr[T14], FromExpr[T15], FromExpr[T16]): FromExpr[Tuple16[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16]] with + def unapply(x: Expr[Tuple16[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16]])(using Quotes) = x match case '{ new Tuple16[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16](${Expr(y1)}, ${Expr(y2)}, ${Expr(y3)}, ${Expr(y4)}, ${Expr(y5)}, ${Expr(y6)}, ${Expr(y7)}, ${Expr(y8)}, ${Expr(y9)}, ${Expr(y10)}, ${Expr(y11)}, ${Expr(y12)}, ${Expr(y13)}, ${Expr(y14)}, ${Expr(y15)}, ${Expr(y16)}) } => Some(Tuple16(y1, y2, y3, y4, y5, y6, y7, y8, y9, y10, y11, y12, y13, y14, y15, y16)) case '{ Tuple16[T1, T2, T3, T4, T5, T6, T7, 
T8, T9, T10, T11, T12, T13, T14, T15, T16](${Expr(y1)}, ${Expr(y2)}, ${Expr(y3)}, ${Expr(y4)}, ${Expr(y5)}, ${Expr(y6)}, ${Expr(y7)}, ${Expr(y8)}, ${Expr(y9)}, ${Expr(y10)}, ${Expr(y11)}, ${Expr(y12)}, ${Expr(y13)}, ${Expr(y14)}, ${Expr(y15)}, ${Expr(y16)}) } => Some(Tuple16(y1, y2, y3, y4, y5, y6, y7, y8, y9, y10, y11, y12, y13, y14, y15, y16)) case _ => None - } - } /** Default implementation of `FromExpr[Tuple17[...]]` * - Transform `'{Tuple7(x1, ..., x17)}` into `Some(Tuple7(x1, ..., x17))` if all `xi` can be transformed using `FromExpr[Ti]` * - Otherwise returns `None` */ - given Tuple17FromExpr[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17](using Type[T1], Type[T2], Type[T3], Type[T4], Type[T5], Type[T6], Type[T7], Type[T8], Type[T9], Type[T10], Type[T11], Type[T12], Type[T13], Type[T14], Type[T15], Type[T16], Type[T17], FromExpr[T1], FromExpr[T2], FromExpr[T3], FromExpr[T4], FromExpr[T5], FromExpr[T6], FromExpr[T7], FromExpr[T8], FromExpr[T9], FromExpr[T10], FromExpr[T11], FromExpr[T12], FromExpr[T13], FromExpr[T14], FromExpr[T15], FromExpr[T16], FromExpr[T17]): FromExpr[Tuple17[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17]] with { - def unapply(x: Expr[Tuple17[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17]])(using Quotes) = x match { + given Tuple17FromExpr[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17](using Type[T1], Type[T2], Type[T3], Type[T4], Type[T5], Type[T6], Type[T7], Type[T8], Type[T9], Type[T10], Type[T11], Type[T12], Type[T13], Type[T14], Type[T15], Type[T16], Type[T17], FromExpr[T1], FromExpr[T2], FromExpr[T3], FromExpr[T4], FromExpr[T5], FromExpr[T6], FromExpr[T7], FromExpr[T8], FromExpr[T9], FromExpr[T10], FromExpr[T11], FromExpr[T12], FromExpr[T13], FromExpr[T14], FromExpr[T15], FromExpr[T16], FromExpr[T17]): FromExpr[Tuple17[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17]] with + def unapply(x: 
Expr[Tuple17[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17]])(using Quotes) = x match case '{ new Tuple17[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17](${Expr(y1)}, ${Expr(y2)}, ${Expr(y3)}, ${Expr(y4)}, ${Expr(y5)}, ${Expr(y6)}, ${Expr(y7)}, ${Expr(y8)}, ${Expr(y9)}, ${Expr(y10)}, ${Expr(y11)}, ${Expr(y12)}, ${Expr(y13)}, ${Expr(y14)}, ${Expr(y15)}, ${Expr(y16)}, ${Expr(y17)}) } => Some(Tuple17(y1, y2, y3, y4, y5, y6, y7, y8, y9, y10, y11, y12, y13, y14, y15, y16, y17)) case '{ Tuple17[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17](${Expr(y1)}, ${Expr(y2)}, ${Expr(y3)}, ${Expr(y4)}, ${Expr(y5)}, ${Expr(y6)}, ${Expr(y7)}, ${Expr(y8)}, ${Expr(y9)}, ${Expr(y10)}, ${Expr(y11)}, ${Expr(y12)}, ${Expr(y13)}, ${Expr(y14)}, ${Expr(y15)}, ${Expr(y16)}, ${Expr(y17)}) } => Some(Tuple17(y1, y2, y3, y4, y5, y6, y7, y8, y9, y10, y11, y12, y13, y14, y15, y16, y17)) case _ => None - } - } /** Default implementation of `FromExpr[Tuple18[...]]` * - Transform `'{Tuple8(x1, ..., x18)}` into `Some(Tuple8(x1, ..., x18))` if all `xi` can be transformed using `FromExpr[Ti]` * - Otherwise returns `None` */ - given Tuple18FromExpr[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18](using Type[T1], Type[T2], Type[T3], Type[T4], Type[T5], Type[T6], Type[T7], Type[T8], Type[T9], Type[T10], Type[T11], Type[T12], Type[T13], Type[T14], Type[T15], Type[T16], Type[T17], Type[T18], FromExpr[T1], FromExpr[T2], FromExpr[T3], FromExpr[T4], FromExpr[T5], FromExpr[T6], FromExpr[T7], FromExpr[T8], FromExpr[T9], FromExpr[T10], FromExpr[T11], FromExpr[T12], FromExpr[T13], FromExpr[T14], FromExpr[T15], FromExpr[T16], FromExpr[T17], FromExpr[T18]): FromExpr[Tuple18[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18]] with { - def unapply(x: Expr[Tuple18[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18]])(using Quotes) = x match { + 
given Tuple18FromExpr[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18](using Type[T1], Type[T2], Type[T3], Type[T4], Type[T5], Type[T6], Type[T7], Type[T8], Type[T9], Type[T10], Type[T11], Type[T12], Type[T13], Type[T14], Type[T15], Type[T16], Type[T17], Type[T18], FromExpr[T1], FromExpr[T2], FromExpr[T3], FromExpr[T4], FromExpr[T5], FromExpr[T6], FromExpr[T7], FromExpr[T8], FromExpr[T9], FromExpr[T10], FromExpr[T11], FromExpr[T12], FromExpr[T13], FromExpr[T14], FromExpr[T15], FromExpr[T16], FromExpr[T17], FromExpr[T18]): FromExpr[Tuple18[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18]] with + def unapply(x: Expr[Tuple18[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18]])(using Quotes) = x match case '{ new Tuple18[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18](${Expr(y1)}, ${Expr(y2)}, ${Expr(y3)}, ${Expr(y4)}, ${Expr(y5)}, ${Expr(y6)}, ${Expr(y7)}, ${Expr(y8)}, ${Expr(y9)}, ${Expr(y10)}, ${Expr(y11)}, ${Expr(y12)}, ${Expr(y13)}, ${Expr(y14)}, ${Expr(y15)}, ${Expr(y16)}, ${Expr(y17)}, ${Expr(y18)}) } => Some(Tuple18(y1, y2, y3, y4, y5, y6, y7, y8, y9, y10, y11, y12, y13, y14, y15, y16, y17, y18)) case '{ Tuple18[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18](${Expr(y1)}, ${Expr(y2)}, ${Expr(y3)}, ${Expr(y4)}, ${Expr(y5)}, ${Expr(y6)}, ${Expr(y7)}, ${Expr(y8)}, ${Expr(y9)}, ${Expr(y10)}, ${Expr(y11)}, ${Expr(y12)}, ${Expr(y13)}, ${Expr(y14)}, ${Expr(y15)}, ${Expr(y16)}, ${Expr(y17)}, ${Expr(y18)}) } => Some(Tuple18(y1, y2, y3, y4, y5, y6, y7, y8, y9, y10, y11, y12, y13, y14, y15, y16, y17, y18)) case _ => None - } - } /** Default implementation of `FromExpr[Tuple19[...]]` * - Transform `'{Tuple9(x1, ..., x19)}` into `Some(Tuple9(x1, ..., x19))` if all `xi` can be transformed using `FromExpr[Ti]` * - Otherwise returns `None` */ - given Tuple19FromExpr[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, 
T13, T14, T15, T16, T17, T18, T19](using Type[T1], Type[T2], Type[T3], Type[T4], Type[T5], Type[T6], Type[T7], Type[T8], Type[T9], Type[T10], Type[T11], Type[T12], Type[T13], Type[T14], Type[T15], Type[T16], Type[T17], Type[T18], Type[T19], FromExpr[T1], FromExpr[T2], FromExpr[T3], FromExpr[T4], FromExpr[T5], FromExpr[T6], FromExpr[T7], FromExpr[T8], FromExpr[T9], FromExpr[T10], FromExpr[T11], FromExpr[T12], FromExpr[T13], FromExpr[T14], FromExpr[T15], FromExpr[T16], FromExpr[T17], FromExpr[T18], FromExpr[T19]): FromExpr[Tuple19[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19]] with { - def unapply(x: Expr[Tuple19[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19]])(using Quotes) = x match { + given Tuple19FromExpr[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19](using Type[T1], Type[T2], Type[T3], Type[T4], Type[T5], Type[T6], Type[T7], Type[T8], Type[T9], Type[T10], Type[T11], Type[T12], Type[T13], Type[T14], Type[T15], Type[T16], Type[T17], Type[T18], Type[T19], FromExpr[T1], FromExpr[T2], FromExpr[T3], FromExpr[T4], FromExpr[T5], FromExpr[T6], FromExpr[T7], FromExpr[T8], FromExpr[T9], FromExpr[T10], FromExpr[T11], FromExpr[T12], FromExpr[T13], FromExpr[T14], FromExpr[T15], FromExpr[T16], FromExpr[T17], FromExpr[T18], FromExpr[T19]): FromExpr[Tuple19[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19]] with + def unapply(x: Expr[Tuple19[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19]])(using Quotes) = x match case '{ new Tuple19[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19](${Expr(y1)}, ${Expr(y2)}, ${Expr(y3)}, ${Expr(y4)}, ${Expr(y5)}, ${Expr(y6)}, ${Expr(y7)}, ${Expr(y8)}, ${Expr(y9)}, ${Expr(y10)}, ${Expr(y11)}, ${Expr(y12)}, ${Expr(y13)}, ${Expr(y14)}, ${Expr(y15)}, ${Expr(y16)}, ${Expr(y17)}, ${Expr(y18)}, ${Expr(y19)}) } => 
Some(Tuple19(y1, y2, y3, y4, y5, y6, y7, y8, y9, y10, y11, y12, y13, y14, y15, y16, y17, y18, y19)) case '{ Tuple19[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19](${Expr(y1)}, ${Expr(y2)}, ${Expr(y3)}, ${Expr(y4)}, ${Expr(y5)}, ${Expr(y6)}, ${Expr(y7)}, ${Expr(y8)}, ${Expr(y9)}, ${Expr(y10)}, ${Expr(y11)}, ${Expr(y12)}, ${Expr(y13)}, ${Expr(y14)}, ${Expr(y15)}, ${Expr(y16)}, ${Expr(y17)}, ${Expr(y18)}, ${Expr(y19)}) } => Some(Tuple19(y1, y2, y3, y4, y5, y6, y7, y8, y9, y10, y11, y12, y13, y14, y15, y16, y17, y18, y19)) case _ => None - } - } /** Default implementation of `FromExpr[Tuple20[...]]` * - Transform `'{Tuple0(x1, ..., x20)}` into `Some(Tuple0(x1, ..., x20))` if all `xi` can be transformed using `FromExpr[Ti]` * - Otherwise returns `None` */ - given Tuple20FromExpr[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20](using Type[T1], Type[T2], Type[T3], Type[T4], Type[T5], Type[T6], Type[T7], Type[T8], Type[T9], Type[T10], Type[T11], Type[T12], Type[T13], Type[T14], Type[T15], Type[T16], Type[T17], Type[T18], Type[T19], Type[T20], FromExpr[T1], FromExpr[T2], FromExpr[T3], FromExpr[T4], FromExpr[T5], FromExpr[T6], FromExpr[T7], FromExpr[T8], FromExpr[T9], FromExpr[T10], FromExpr[T11], FromExpr[T12], FromExpr[T13], FromExpr[T14], FromExpr[T15], FromExpr[T16], FromExpr[T17], FromExpr[T18], FromExpr[T19], FromExpr[T20]): FromExpr[Tuple20[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20]] with { - def unapply(x: Expr[Tuple20[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20]])(using Quotes) = x match { + given Tuple20FromExpr[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20](using Type[T1], Type[T2], Type[T3], Type[T4], Type[T5], Type[T6], Type[T7], Type[T8], Type[T9], Type[T10], Type[T11], Type[T12], Type[T13], Type[T14], Type[T15], Type[T16], Type[T17], Type[T18], 
Type[T19], Type[T20], FromExpr[T1], FromExpr[T2], FromExpr[T3], FromExpr[T4], FromExpr[T5], FromExpr[T6], FromExpr[T7], FromExpr[T8], FromExpr[T9], FromExpr[T10], FromExpr[T11], FromExpr[T12], FromExpr[T13], FromExpr[T14], FromExpr[T15], FromExpr[T16], FromExpr[T17], FromExpr[T18], FromExpr[T19], FromExpr[T20]): FromExpr[Tuple20[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20]] with + def unapply(x: Expr[Tuple20[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20]])(using Quotes) = x match case '{ new Tuple20[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20](${Expr(y1)}, ${Expr(y2)}, ${Expr(y3)}, ${Expr(y4)}, ${Expr(y5)}, ${Expr(y6)}, ${Expr(y7)}, ${Expr(y8)}, ${Expr(y9)}, ${Expr(y10)}, ${Expr(y11)}, ${Expr(y12)}, ${Expr(y13)}, ${Expr(y14)}, ${Expr(y15)}, ${Expr(y16)}, ${Expr(y17)}, ${Expr(y18)}, ${Expr(y19)}, ${Expr(y20)}) } => Some(Tuple20(y1, y2, y3, y4, y5, y6, y7, y8, y9, y10, y11, y12, y13, y14, y15, y16, y17, y18, y19, y20)) case '{ Tuple20[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20](${Expr(y1)}, ${Expr(y2)}, ${Expr(y3)}, ${Expr(y4)}, ${Expr(y5)}, ${Expr(y6)}, ${Expr(y7)}, ${Expr(y8)}, ${Expr(y9)}, ${Expr(y10)}, ${Expr(y11)}, ${Expr(y12)}, ${Expr(y13)}, ${Expr(y14)}, ${Expr(y15)}, ${Expr(y16)}, ${Expr(y17)}, ${Expr(y18)}, ${Expr(y19)}, ${Expr(y20)}) } => Some(Tuple20(y1, y2, y3, y4, y5, y6, y7, y8, y9, y10, y11, y12, y13, y14, y15, y16, y17, y18, y19, y20)) case _ => None - } - } /** Default implementation of `FromExpr[Tuple21[...]]` * - Transform `'{Tuple1(x1, ..., x21)}` into `Some(Tuple1(x1, ..., x21))` if all `xi` can be transformed using `FromExpr[Ti]` * - Otherwise returns `None` */ - given Tuple21FromExpr[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21](using Type[T1], Type[T2], Type[T3], Type[T4], Type[T5], Type[T6], Type[T7], Type[T8], 
Type[T9], Type[T10], Type[T11], Type[T12], Type[T13], Type[T14], Type[T15], Type[T16], Type[T17], Type[T18], Type[T19], Type[T20], Type[T21], FromExpr[T1], FromExpr[T2], FromExpr[T3], FromExpr[T4], FromExpr[T5], FromExpr[T6], FromExpr[T7], FromExpr[T8], FromExpr[T9], FromExpr[T10], FromExpr[T11], FromExpr[T12], FromExpr[T13], FromExpr[T14], FromExpr[T15], FromExpr[T16], FromExpr[T17], FromExpr[T18], FromExpr[T19], FromExpr[T20], FromExpr[T21]): FromExpr[Tuple21[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21]] with { - def unapply(x: Expr[Tuple21[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21]])(using Quotes) = x match { + given Tuple21FromExpr[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21](using Type[T1], Type[T2], Type[T3], Type[T4], Type[T5], Type[T6], Type[T7], Type[T8], Type[T9], Type[T10], Type[T11], Type[T12], Type[T13], Type[T14], Type[T15], Type[T16], Type[T17], Type[T18], Type[T19], Type[T20], Type[T21], FromExpr[T1], FromExpr[T2], FromExpr[T3], FromExpr[T4], FromExpr[T5], FromExpr[T6], FromExpr[T7], FromExpr[T8], FromExpr[T9], FromExpr[T10], FromExpr[T11], FromExpr[T12], FromExpr[T13], FromExpr[T14], FromExpr[T15], FromExpr[T16], FromExpr[T17], FromExpr[T18], FromExpr[T19], FromExpr[T20], FromExpr[T21]): FromExpr[Tuple21[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21]] with + def unapply(x: Expr[Tuple21[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21]])(using Quotes) = x match case '{ new Tuple21[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21](${Expr(y1)}, ${Expr(y2)}, ${Expr(y3)}, ${Expr(y4)}, ${Expr(y5)}, ${Expr(y6)}, ${Expr(y7)}, ${Expr(y8)}, ${Expr(y9)}, ${Expr(y10)}, ${Expr(y11)}, ${Expr(y12)}, ${Expr(y13)}, ${Expr(y14)}, ${Expr(y15)}, ${Expr(y16)}, ${Expr(y17)}, 
${Expr(y18)}, ${Expr(y19)}, ${Expr(y20)}, ${Expr(y21)}) } => Some(Tuple21(y1, y2, y3, y4, y5, y6, y7, y8, y9, y10, y11, y12, y13, y14, y15, y16, y17, y18, y19, y20, y21)) case '{ Tuple21[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21](${Expr(y1)}, ${Expr(y2)}, ${Expr(y3)}, ${Expr(y4)}, ${Expr(y5)}, ${Expr(y6)}, ${Expr(y7)}, ${Expr(y8)}, ${Expr(y9)}, ${Expr(y10)}, ${Expr(y11)}, ${Expr(y12)}, ${Expr(y13)}, ${Expr(y14)}, ${Expr(y15)}, ${Expr(y16)}, ${Expr(y17)}, ${Expr(y18)}, ${Expr(y19)}, ${Expr(y20)}, ${Expr(y21)}) } => Some(Tuple21(y1, y2, y3, y4, y5, y6, y7, y8, y9, y10, y11, y12, y13, y14, y15, y16, y17, y18, y19, y20, y21)) case _ => None - } - } /** Default implementation of `FromExpr[Tuple22[...]]` * - Transform `'{Tuple2(x1, ..., x22)}` into `Some(Tuple2(x1, ..., x22))` if all `xi` can be transformed using `FromExpr[Ti]` * - Otherwise returns `None` */ - given Tuple22FromExpr[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22](using Type[T1], Type[T2], Type[T3], Type[T4], Type[T5], Type[T6], Type[T7], Type[T8], Type[T9], Type[T10], Type[T11], Type[T12], Type[T13], Type[T14], Type[T15], Type[T16], Type[T17], Type[T18], Type[T19], Type[T20], Type[T21], Type[T22], FromExpr[T1], FromExpr[T2], FromExpr[T3], FromExpr[T4], FromExpr[T5], FromExpr[T6], FromExpr[T7], FromExpr[T8], FromExpr[T9], FromExpr[T10], FromExpr[T11], FromExpr[T12], FromExpr[T13], FromExpr[T14], FromExpr[T15], FromExpr[T16], FromExpr[T17], FromExpr[T18], FromExpr[T19], FromExpr[T20], FromExpr[T21], FromExpr[T22]): FromExpr[Tuple22[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22]] with { - def unapply(x: Expr[Tuple22[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22]])(using Quotes) = x match { + given Tuple22FromExpr[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, 
T20, T21, T22](using Type[T1], Type[T2], Type[T3], Type[T4], Type[T5], Type[T6], Type[T7], Type[T8], Type[T9], Type[T10], Type[T11], Type[T12], Type[T13], Type[T14], Type[T15], Type[T16], Type[T17], Type[T18], Type[T19], Type[T20], Type[T21], Type[T22], FromExpr[T1], FromExpr[T2], FromExpr[T3], FromExpr[T4], FromExpr[T5], FromExpr[T6], FromExpr[T7], FromExpr[T8], FromExpr[T9], FromExpr[T10], FromExpr[T11], FromExpr[T12], FromExpr[T13], FromExpr[T14], FromExpr[T15], FromExpr[T16], FromExpr[T17], FromExpr[T18], FromExpr[T19], FromExpr[T20], FromExpr[T21], FromExpr[T22]): FromExpr[Tuple22[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22]] with + def unapply(x: Expr[Tuple22[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22]])(using Quotes) = x match case '{ new Tuple22[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22](${Expr(y1)}, ${Expr(y2)}, ${Expr(y3)}, ${Expr(y4)}, ${Expr(y5)}, ${Expr(y6)}, ${Expr(y7)}, ${Expr(y8)}, ${Expr(y9)}, ${Expr(y10)}, ${Expr(y11)}, ${Expr(y12)}, ${Expr(y13)}, ${Expr(y14)}, ${Expr(y15)}, ${Expr(y16)}, ${Expr(y17)}, ${Expr(y18)}, ${Expr(y19)}, ${Expr(y20)}, ${Expr(y21)}, ${Expr(y22)}) } => Some(Tuple22(y1, y2, y3, y4, y5, y6, y7, y8, y9, y10, y11, y12, y13, y14, y15, y16, y17, y18, y19, y20, y21, y22)) case '{ Tuple22[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22](${Expr(y1)}, ${Expr(y2)}, ${Expr(y3)}, ${Expr(y4)}, ${Expr(y5)}, ${Expr(y6)}, ${Expr(y7)}, ${Expr(y8)}, ${Expr(y9)}, ${Expr(y10)}, ${Expr(y11)}, ${Expr(y12)}, ${Expr(y13)}, ${Expr(y14)}, ${Expr(y15)}, ${Expr(y16)}, ${Expr(y17)}, ${Expr(y18)}, ${Expr(y19)}, ${Expr(y20)}, ${Expr(y21)}, ${Expr(y22)}) } => Some(Tuple22(y1, y2, y3, y4, y5, y6, y7, y8, y9, y10, y11, y12, y13, y14, y15, y16, y17, y18, y19, y20, y21, y22)) case _ => None - } - } /** Default implementation of 
`FromExpr[Seq]` * - Transform `'{Seq(x1, ..., xn)}` into `Some(Seq(x1, ..., xn))` if all `xi` can be transformed using `FromExpr[Ti]` * - Transform sequences that come out of varargs * - Otherwise returns `None` */ - given SeqFromExpr[T](using Type[T], FromExpr[T]): FromExpr[Seq[T]] with { - def unapply(x: Expr[Seq[T]])(using Quotes) = x match { + given SeqFromExpr[T](using Type[T], FromExpr[T]): FromExpr[Seq[T]] with + def unapply(x: Expr[Seq[T]])(using Quotes) = x match case Varargs(Exprs(elems)) => Some(elems) case '{ scala.Seq[T](${Varargs(Exprs(elems))}: _*) } => Some(elems) case '{ scala.collection.immutable.Seq[T](${Varargs(Exprs(elems))}: _*) } => Some(elems) case '{ ${Expr(x)}: List[T] } => Some(x) case _ => None - } - } /** Default implementation of `FromExpr[Nil]` * - Transform `'{Nil}` into `Some(Nil)` * - Otherwise returns `None` */ - given NilFromExpr: FromExpr[Nil.type] with { - def unapply(x: Expr[Nil.type])(using Quotes) = x match { + given NilFromExpr: FromExpr[Nil.type] with + def unapply(x: Expr[Nil.type])(using Quotes) = x match case '{ scala.Nil } | '{ scala.collection.immutable.Nil } => Some(Nil) case _ => None - } - } /** Default implementation of `FromExpr[List]` * - Transform `'{List(x1, ..., xn)}` into `Some(List(x1, ..., xn))` if all `xi` can be transformed using `FromExpr[Ti]` @@ -452,80 +391,67 @@ object FromExpr { * - Transform `'{Nil}` into `Some(Nil)` * - Otherwise returns `None` */ - given ListFromExpr[T](using Type[T], FromExpr[T]): FromExpr[List[T]] with { - def unapply(x: Expr[List[T]])(using Quotes) = x match { + given ListFromExpr[T](using Type[T], FromExpr[T]): FromExpr[List[T]] with + def unapply(x: Expr[List[T]])(using Quotes) = x match case '{ scala.List[T](${Varargs(Exprs(elems))}: _*) } => Some(elems.toList) case '{ scala.List.empty[T] } => Some(Nil) case '{ Nil } => Some(Nil) case '{ scala.collection.immutable.List[T](${Varargs(Exprs(elems))}: _*) } => Some(elems.toList) case '{ scala.collection.immutable.List.empty[T] 
} => Some(Nil) case _ => None - } - } /** Default implementation of `FromExpr[Set]` * - Transform `'{Set(x1, ..., xn)}` into `Some(Set(x1, ..., xn))` if all `xi` can be transformed using `FromExpr[Ti]` * - Transform `'{Set.empty}` into `Some(Set())` * - Otherwise returns `None` */ - given SetFromExpr[T](using Type[T], FromExpr[T]): FromExpr[Set[T]] with { - def unapply(x: Expr[Set[T]])(using Quotes) = x match { + given SetFromExpr[T](using Type[T], FromExpr[T]): FromExpr[Set[T]] with + def unapply(x: Expr[Set[T]])(using Quotes) = x match case '{ Set[T](${Varargs(Exprs(elems))}: _*) } => Some(elems.toSet) case '{ Set.empty[T] } => Some(Set.empty[T]) case '{ scala.collection.immutable.Set[T](${Varargs(Exprs(elems))}: _*) } => Some(elems.toSet) case '{ scala.collection.immutable.Set.empty[T] } => Some(Set.empty[T]) case _ => None - } - } /** Default implementation of `FromExpr[Map]` * - Transform `'{Map(x1, ..., xn)}` into `Some(Map(x1, ..., xn))` if all `xi` can be transformed using `FromExpr[Ti]` * - Transform `'{Map.empty}` into `Some(Map())` * - Otherwise returns `None` */ - given MapFromExpr[T, U](using Type[T], Type[U], FromExpr[T], FromExpr[U]): FromExpr[Map[T, U]] with { - def unapply(x: Expr[Map[T, U]])(using Quotes) = x match { + given MapFromExpr[T, U](using Type[T], Type[U], FromExpr[T], FromExpr[U]): FromExpr[Map[T, U]] with + def unapply(x: Expr[Map[T, U]])(using Quotes) = x match case '{ Map[T, U](${Varargs(Exprs(elems))}: _*) } => Some(elems.toMap) case '{ Map.empty[T, U] } => Some(Map.empty) case '{ scala.collection.immutable.Map[T, U](${Varargs(Exprs(elems))}: _*) } => Some(elems.toMap) case '{ scala.collection.immutable.Map.empty[T, U] } => Some(Map.empty) case _ => None - } - } /** Default implementation of `FromExpr[Either]` * - Transform `'{Left(x)}` into `Some(Left(x))` if `x` can be transformed using `FromExpr[L]` * - Transform `'{Right(x)}` into `Some(Right(x))` if `x` can be transformed using `FromExpr[R]` * - Otherwise returns `None` */ - 
given EitherFromExpr[L, R](using Type[L], Type[R], FromExpr[L], FromExpr[R]): FromExpr[Either[L, R]] with { - def unapply(x: Expr[Either[L, R]])(using Quotes) = x match { + given EitherFromExpr[L, R](using Type[L], Type[R], FromExpr[L], FromExpr[R]): FromExpr[Either[L, R]] with + def unapply(x: Expr[Either[L, R]])(using Quotes) = x match case '{ $x: Left[L, R] } => x.value case '{ $x: Right[L, R] } => x.value case _ => None - } - } /** Default implementation of `FromExpr[Left]` * - Transform `'{Left(x)}` into `Some(Left(x))` if `x` can be transformed using `FromExpr[L]` * - Otherwise returns `None` */ - given LeftFromExpr[L, R](using Type[L], Type[R], FromExpr[L]): FromExpr[Left[L, R]] with { - def unapply(x: Expr[Left[L, R]])(using Quotes) = x match { + given LeftFromExpr[L, R](using Type[L], Type[R], FromExpr[L]): FromExpr[Left[L, R]] with + def unapply(x: Expr[Left[L, R]])(using Quotes) = x match case '{ Left[L, R](${Expr(x)}) } => Some(Left(x)) case _ => None - } - } /** Default implementation of `FromExpr[Right]` * - Transform `'{Right(x)}` into `Some(Right(x))` if `x` can be transformed using `FromExpr[R]` * - Otherwise returns `None` */ - given RightFromExpr[L, R](using Type[L], Type[R], FromExpr[R]): FromExpr[Right[L, R]] with { - def unapply(x: Expr[Right[L, R]])(using Quotes) = x match { + given RightFromExpr[L, R](using Type[L], Type[R], FromExpr[R]): FromExpr[Right[L, R]] with + def unapply(x: Expr[Right[L, R]])(using Quotes) = x match case '{ Right[L, R](${Expr(x)}) } => Some(Right(x)) case _ => None - } - } -} diff --git a/library/src/scala/quoted/Quotes.scala b/library/src/scala/quoted/Quotes.scala index c7d5719b0e1f..26eb4eb2bd18 100644 --- a/library/src/scala/quoted/Quotes.scala +++ b/library/src/scala/quoted/Quotes.scala @@ -287,7 +287,7 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching => given TreeMethods: TreeMethods /** Extension methods of `Tree` */ - trait TreeMethods { + trait TreeMethods: extension (self: Tree) /** 
Position in the source code */ @@ -315,7 +315,6 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching => def changeOwner(newOwner: Symbol): ThisTree end extension - } /** Tree representing a package clause in the source code * @@ -768,7 +767,7 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching => given TermMethods: TermMethods /** Extension methods of `Term` */ - trait TermMethods { + trait TermMethods: extension (self: Term) /** TypeRepr of this term */ @@ -824,7 +823,6 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching => end extension - } /** Tree representing a reference to definition */ type Ref <: Term @@ -2572,7 +2570,7 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching => given TypeReprMethods: TypeReprMethods /** Extension methods of `TypeRepr` */ - trait TypeReprMethods { + trait TypeReprMethods: extension (self: TypeRepr) /** Shows the type as a String */ @@ -2713,7 +2711,6 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching => /** The applied type arguments (empty if there is no such arguments) */ def typeArgs: List[TypeRepr] end extension - } /** A singleton type representing a known constant value */ type ConstantType <: TypeRepr @@ -3338,7 +3335,7 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching => given ConstantMethods: ConstantMethods /** Extension methods of `Constant` */ - trait ConstantMethods { + trait ConstantMethods: extension (self: Constant) /** Returns the value of the constant */ def value: Any @@ -3347,7 +3344,6 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching => def show(using Printer[Constant]): String end extension - } /** Constant Boolean value */ type BooleanConstant <: Constant @@ -3846,7 +3842,7 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching => given SymbolMethods: SymbolMethods /** Extension methods of `Symbol` */ - trait SymbolMethods { + trait SymbolMethods: 
extension (self: Symbol) /** Owner of this symbol. The owner is the symbol in which this symbol is defined. Throws if this symbol does not have an owner. */ @@ -4123,7 +4119,6 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching => */ def termRef: TermRef end extension - } //////////////// // SIGNATURES // @@ -4145,7 +4140,7 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching => given SignatureMethods: SignatureMethods /** Extension methods of `Signature` */ - trait SignatureMethods { + trait SignatureMethods: extension (self: Signature) /** The signatures of the method parameters. @@ -4161,7 +4156,6 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching => def resultSig: String end extension - } ////////////////////////// // STANDARD DEFINITIONS // @@ -4526,7 +4520,7 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching => given FlagsMethods: FlagsMethods /** Extension methods of `Flags` */ - trait FlagsMethods { + trait FlagsMethods: extension (self: Flags) /** Is the given flag set a subset of this flag sets */ def is(that: Flags): Boolean @@ -4541,7 +4535,6 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching => def show: String end extension - } /////////////// @@ -4568,7 +4561,7 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching => given PositionMethods: PositionMethods /** Extension methods of `Position` */ - trait PositionMethods { + trait PositionMethods: extension (self: Position) /** The start offset in the source file */ @@ -4596,7 +4589,6 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching => def sourceCode: Option[String] end extension - } /** Scala source file */ type SourceFile <: AnyRef @@ -4614,7 +4606,7 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching => given SourceFileMethods: SourceFileMethods /** Extension methods of `SourceFile` */ - trait SourceFileMethods { + trait SourceFileMethods: extension 
(self: SourceFile) /** Path to this source file. May be `null` for virtual files such as in the REPL. */ @deprecated("Use getJPath, name, or path instead of jpath", "3.0.2") @@ -4636,7 +4628,6 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching => /** Content of this source file */ def content: Option[String] end extension - } /////////////// // REPORTING // @@ -4727,8 +4718,8 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching => def foldTrees(x: X, trees: Iterable[Tree])(owner: Symbol): X = trees.foldLeft(x)((acc, y) => foldTree(acc, y)(owner)) - def foldOverTree(x: X, tree: Tree)(owner: Symbol): X = { - tree match { + def foldOverTree(x: X, tree: Tree)(owner: Symbol): X = + tree match case Ident(_) => x case Select(qualifier, _) => @@ -4812,8 +4803,6 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching => case Alternatives(patterns) => foldTrees(x, patterns)(owner) case SummonFrom(cases) => foldTrees(x, cases)(owner) case _ => throw MatchError(tree.show(using Printer.TreeStructure)) - } - } end TreeAccumulator @@ -4864,8 +4853,8 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching => */ trait TreeMap: - def transformTree(tree: Tree)(owner: Symbol): Tree = { - tree match { + def transformTree(tree: Tree)(owner: Symbol): Tree = + tree match case tree: PackageClause => PackageClause.copy(tree)(transformTerm(tree.pid)(owner).asInstanceOf[Ref], transformTrees(tree.stats)(tree.symbol)) case tree: Import => @@ -4892,11 +4881,9 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching => TypedOrTest.copy(tree)(transformTree(inner)(owner), transformTypeTree(tpt)(owner)) case _ => throw MatchError(tree.show(using Printer.TreeStructure)) - } - } - def transformStatement(tree: Statement)(owner: Symbol): Statement = { - tree match { + def transformStatement(tree: Statement)(owner: Symbol): Statement = + tree match case tree: Term => transformTerm(tree)(owner) case tree: ValDef => @@ 
-4906,10 +4893,9 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching => ValDef.copy(tree)(tree.name, tpt1, rhs1) case tree: DefDef => val owner = tree.symbol - val newParamClauses = tree.paramss.mapConserve { + val newParamClauses = tree.paramss.mapConserve: case TypeParamClause(params) => TypeParamClause(transformSubTrees(params)(owner)) case TermParamClause(params) => TermParamClause(transformSubTrees(params)(owner)) - } DefDef.copy(tree)(tree.name, newParamClauses, transformTypeTree(tree.returnTpt)(owner), tree.rhs.map(x => transformTerm(x)(owner))) case tree: TypeDef => val owner = tree.symbol @@ -4929,11 +4915,9 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching => tree case _ => throw MatchError(tree.show(using Printer.TreeStructure)) - } - } - def transformTerm(tree: Term)(owner: Symbol): Term = { - tree match { + def transformTerm(tree: Term)(owner: Symbol): Term = + tree match case Ident(name) => tree case Select(qualifier, name) => @@ -4978,10 +4962,8 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching => SummonFrom.copy(tree)(transformCaseDefs(cases)(owner)) case _ => throw MatchError(tree.show(using Printer.TreeStructure)) - } - } - def transformTypeTree(tree: TypeTree)(owner: Symbol): TypeTree = tree match { + def transformTypeTree(tree: TypeTree)(owner: Symbol): TypeTree = tree match case Inferred() => tree case tree: TypeIdent => tree case tree: TypeSelect => @@ -5008,15 +4990,12 @@ trait Quotes { self: runtime.QuoteUnpickler & runtime.QuoteMatching => TypeBlock.copy(tree)(tree.aliases, tree.tpt) case _ => throw MatchError(tree.show(using Printer.TreeStructure)) - } - def transformCaseDef(tree: CaseDef)(owner: Symbol): CaseDef = { + def transformCaseDef(tree: CaseDef)(owner: Symbol): CaseDef = CaseDef.copy(tree)(transformTree(tree.pattern)(owner), tree.guard.map(x => transformTerm(x)(owner)), transformTerm(tree.rhs)(owner)) - } - def transformTypeCaseDef(tree: TypeCaseDef)(owner: Symbol): 
TypeCaseDef = { + def transformTypeCaseDef(tree: TypeCaseDef)(owner: Symbol): TypeCaseDef = TypeCaseDef.copy(tree)(transformTypeTree(tree.pattern)(owner), transformTypeTree(tree.rhs)(owner)) - } def transformStats(trees: List[Statement])(owner: Symbol): List[Statement] = trees mapConserve (x => transformStatement(x)(owner)) diff --git a/library/src/scala/quoted/ToExpr.scala b/library/src/scala/quoted/ToExpr.scala index d7e55a5bc411..64d8f58b644f 100644 --- a/library/src/scala/quoted/ToExpr.scala +++ b/library/src/scala/quoted/ToExpr.scala @@ -5,431 +5,350 @@ import scala.reflect.ClassTag /** A type class for types that can convert a value of `T` into `quoted.Expr[T]` * an expression that will create a copy of the value. */ -trait ToExpr[T] { +trait ToExpr[T]: /** Lift a value into an expression containing the construction of that value */ def apply(x: T)(using Quotes): Expr[T] -} /** Default given instances of `ToExpr` */ -object ToExpr { +object ToExpr: // IMPORTANT Keep in sync with tests/run-staging/liftables.scala /** Default implementation of `ToExpr[Boolean]` */ - given BooleanToExpr[T <: Boolean]: ToExpr[T] with { + given BooleanToExpr[T <: Boolean]: ToExpr[T] with def apply(x: T)(using Quotes) = import quotes.reflect._ Literal(BooleanConstant(x)).asExpr.asInstanceOf[Expr[T]] - } /** Default implementation of `ToExpr[Byte]` */ - given ByteToExpr[T <: Byte]: ToExpr[T] with { + given ByteToExpr[T <: Byte]: ToExpr[T] with def apply(x: T)(using Quotes) = import quotes.reflect._ Literal(ByteConstant(x)).asExpr.asInstanceOf[Expr[T]] - } /** Default implementation of `ToExpr[Short]` */ - given ShortToExpr[T <: Short]: ToExpr[T] with { + given ShortToExpr[T <: Short]: ToExpr[T] with def apply(x: T)(using Quotes) = import quotes.reflect._ Literal(ShortConstant(x)).asExpr.asInstanceOf[Expr[T]] - } /** Default implementation of `ToExpr[Int]` */ - given IntToExpr[T <: Int]: ToExpr[T] with { + given IntToExpr[T <: Int]: ToExpr[T] with def apply(x: T)(using Quotes) = 
import quotes.reflect._ Literal(IntConstant(x)).asExpr.asInstanceOf[Expr[T]] - } /** Default implementation of `ToExpr[Long]` */ - given LongToExpr[T <: Long]: ToExpr[T] with { + given LongToExpr[T <: Long]: ToExpr[T] with def apply(x: T)(using Quotes) = import quotes.reflect._ Literal(LongConstant(x)).asExpr.asInstanceOf[Expr[T]] - } /** Default implementation of `ToExpr[Float]` */ - given FloatToExpr[T <: Float]: ToExpr[T] with { + given FloatToExpr[T <: Float]: ToExpr[T] with def apply(x: T)(using Quotes) = import quotes.reflect._ Literal(FloatConstant(x)).asExpr.asInstanceOf[Expr[T]] - } /** Default implementation of `ToExpr[Double]` */ - given DoubleToExpr[T <: Double]: ToExpr[T] with { + given DoubleToExpr[T <: Double]: ToExpr[T] with def apply(x: T)(using Quotes) = import quotes.reflect._ Literal(DoubleConstant(x)).asExpr.asInstanceOf[Expr[T]] - } /** Default implementation of `ToExpr[Char]` */ - given CharToExpr[T <: Char]: ToExpr[T] with { + given CharToExpr[T <: Char]: ToExpr[T] with def apply(x: T)(using Quotes) = import quotes.reflect._ Literal(CharConstant(x)).asExpr.asInstanceOf[Expr[T]] - } /** Default implementation of `ToExpr[String]` */ - given StringToExpr[T <: String]: ToExpr[T] with { + given StringToExpr[T <: String]: ToExpr[T] with def apply(x: T)(using Quotes) = import quotes.reflect._ Literal(StringConstant(x)).asExpr.asInstanceOf[Expr[T]] - } /** Default implementation of `ToExpr[Class[T]]` */ - given ClassToExpr[T <: Class[?]]: ToExpr[T] with { - def apply(x: T)(using Quotes) = { + given ClassToExpr[T <: Class[?]]: ToExpr[T] with + def apply(x: T)(using Quotes) = import quotes.reflect._ Ref(defn.Predef_classOf).appliedToType(TypeRepr.typeConstructorOf(x)).asExpr.asInstanceOf[Expr[T]] - } - } /** Default implementation of `ToExpr[ClassTag[T]]` */ - given ClassTagToExpr[T: Type]: ToExpr[ClassTag[T]] with { + given ClassTagToExpr[T: Type]: ToExpr[ClassTag[T]] with def apply(ct: ClassTag[T])(using Quotes): Expr[ClassTag[T]] = '{ 
ClassTag[T](${Expr(ct.runtimeClass.asInstanceOf[Class[T]])}) } - } /** Default implementation of `ToExpr[Array[T]]` */ - given ArrayToExpr[T: Type: ToExpr: ClassTag]: ToExpr[Array[T]] with { + given ArrayToExpr[T: Type: ToExpr: ClassTag]: ToExpr[Array[T]] with def apply(arr: Array[T])(using Quotes): Expr[Array[T]] = '{ Array[T](${Expr(arr.toSeq)}: _*)(${Expr(summon[ClassTag[T]])}) } - } /** Default implementation of `ToExpr[Array[Boolean]]` */ - given ArrayOfBooleanToExpr: ToExpr[Array[Boolean]] with { + given ArrayOfBooleanToExpr: ToExpr[Array[Boolean]] with def apply(array: Array[Boolean])(using Quotes): Expr[Array[Boolean]] = if (array.length == 0) '{ Array.emptyBooleanArray } else '{ Array(${Expr(array(0))}, ${Expr(array.toSeq.tail)}: _*) } - } /** Default implementation of `ToExpr[Array[Byte]]` */ - given ArrayOfByteToExpr: ToExpr[Array[Byte]] with { + given ArrayOfByteToExpr: ToExpr[Array[Byte]] with def apply(array: Array[Byte])(using Quotes): Expr[Array[Byte]] = if (array.length == 0) '{ Array.emptyByteArray } else '{ Array(${Expr(array(0))}, ${Expr(array.toSeq.tail)}: _*) } - } /** Default implementation of `ToExpr[Array[Short]]` */ - given ArrayOfShortToExpr: ToExpr[Array[Short]] with { + given ArrayOfShortToExpr: ToExpr[Array[Short]] with def apply(array: Array[Short])(using Quotes): Expr[Array[Short]] = if (array.length == 0) '{ Array.emptyShortArray } else '{ Array(${Expr(array(0))}, ${Expr(array.toSeq.tail)}: _*) } - } /** Default implementation of `ToExpr[Array[Char]]` */ - given ArrayOfCharToExpr: ToExpr[Array[Char]] with { + given ArrayOfCharToExpr: ToExpr[Array[Char]] with def apply(array: Array[Char])(using Quotes): Expr[Array[Char]] = if (array.length == 0) '{ Array.emptyCharArray } else '{ Array(${Expr(array(0))}, ${Expr(array.toSeq.tail)}: _*) } - } /** Default implementation of `ToExpr[Array[Int]]` */ - given ArrayOfIntToExpr: ToExpr[Array[Int]] with { + given ArrayOfIntToExpr: ToExpr[Array[Int]] with def apply(array: Array[Int])(using 
Quotes): Expr[Array[Int]] = if (array.length == 0) '{ Array.emptyIntArray } else '{ Array(${Expr(array(0))}, ${Expr(array.toSeq.tail)}: _*) } - } /** Default implementation of `ToExpr[Array[Long]]` */ - given ArrayOfLongToExpr: ToExpr[Array[Long]] with { + given ArrayOfLongToExpr: ToExpr[Array[Long]] with def apply(array: Array[Long])(using Quotes): Expr[Array[Long]] = if (array.length == 0) '{ Array.emptyLongArray } else '{ Array(${Expr(array(0))}, ${Expr(array.toSeq.tail)}: _*) } - } /** Default implementation of `ToExpr[Array[Float]]` */ - given ArrayOfFloatToExpr: ToExpr[Array[Float]] with { + given ArrayOfFloatToExpr: ToExpr[Array[Float]] with def apply(array: Array[Float])(using Quotes): Expr[Array[Float]] = if (array.length == 0) '{ Array.emptyFloatArray } else '{ Array(${Expr(array(0))}, ${Expr(array.toSeq.tail)}: _*) } - } /** Default implementation of `ToExpr[Array[Double]]` */ - given ArrayOfDoubleToExpr: ToExpr[Array[Double]] with { + given ArrayOfDoubleToExpr: ToExpr[Array[Double]] with def apply(array: Array[Double])(using Quotes): Expr[Array[Double]] = if (array.length == 0) '{ Array.emptyDoubleArray } else '{ Array(${Expr(array(0))}, ${Expr(array.toSeq.tail)}: _*) } - } /** Default implementation of `ToExpr[IArray[T]]` */ - given IArrayToExpr[T: Type](using ltArray: ToExpr[Array[T]]): ToExpr[IArray[T]] with { + given IArrayToExpr[T: Type](using ltArray: ToExpr[Array[T]]): ToExpr[IArray[T]] with def apply(iarray: IArray[T])(using Quotes): Expr[IArray[T]] = '{ ${ltArray.apply(iarray.asInstanceOf[Array[T]])}.asInstanceOf[IArray[T]] } - } /** Default implementation of `ToExpr[Seq[T]]` */ - given SeqToExpr[T: Type: ToExpr]: ToExpr[Seq[T]] with { + given SeqToExpr[T: Type: ToExpr]: ToExpr[Seq[T]] with def apply(xs: Seq[T])(using Quotes): Expr[Seq[T]] = Expr.ofSeq(xs.map(summon[ToExpr[T]].apply)) - } /** Default implementation of `ToExpr[List[T]]` */ - given ListToExpr[T: Type: ToExpr]: ToExpr[List[T]] with { + given ListToExpr[T: Type: ToExpr]: 
ToExpr[List[T]] with def apply(xs: List[T])(using Quotes): Expr[List[T]] = Expr.ofList(xs.map(summon[ToExpr[T]].apply)) - } /** Default implementation of `ToExpr[Nil.type]` */ - given NilToExpr: ToExpr[Nil.type] with { + given NilToExpr: ToExpr[Nil.type] with def apply(xs: Nil.type)(using Quotes): Expr[Nil.type] = '{ Nil } - } /** Default implementation of `ToExpr[Set[T]]` */ - given SetToExpr[T: Type: ToExpr]: ToExpr[Set[T]] with { + given SetToExpr[T: Type: ToExpr]: ToExpr[Set[T]] with def apply(set: Set[T])(using Quotes): Expr[Set[T]] = '{ Set(${Expr(set.toSeq)}: _*) } - } /** Default implementation of `ToExpr[Map[T, U]]` */ - given MapToExpr[T: Type: ToExpr, U: Type: ToExpr]: ToExpr[Map[T, U]] with { + given MapToExpr[T: Type: ToExpr, U: Type: ToExpr]: ToExpr[Map[T, U]] with def apply(map: Map[T, U])(using Quotes): Expr[Map[T, U]] = '{ Map(${Expr(map.toSeq)}: _*) } - } /** Default implementation of `ToExpr[Option[T]]` */ - given OptionToExpr[T: Type: ToExpr]: ToExpr[Option[T]] with { - def apply(x: Option[T])(using Quotes): Expr[Option[T]] = x match { + given OptionToExpr[T: Type: ToExpr]: ToExpr[Option[T]] with + def apply(x: Option[T])(using Quotes): Expr[Option[T]] = x match case x: Some[T] => Expr(x) case None => Expr(None) - } - } /** Default implementation of `ToExpr[Some[T]]` */ - given SomeToExpr[T: Type: ToExpr]: ToExpr[Some[T]] with { + given SomeToExpr[T: Type: ToExpr]: ToExpr[Some[T]] with def apply(x: Some[T])(using Quotes): Expr[Some[T]] = '{ Some[T](${Expr(x.get)}) } - } /** Default implementation of `ToExpr[None.type]` */ - given NoneToExpr: ToExpr[None.type] with { + given NoneToExpr: ToExpr[None.type] with def apply(x: None.type)(using Quotes): Expr[None.type] = '{ None } - } /** Default implementation of `ToExpr[Either[L, R]]` */ - given EitherToExpr[L: Type: ToExpr, R: Type: ToExpr]: ToExpr[Either[L, R]] with { + given EitherToExpr[L: Type: ToExpr, R: Type: ToExpr]: ToExpr[Either[L, R]] with def apply(x: Either[L, R])(using Quotes): 
Expr[Either[L, R]] = x match case x: Left[L, R] => Expr(x) case x: Right[L, R] => Expr(x) - } /** Default implementation of `ToExpr[Left[L, R]]` */ - given LeftToExpr[L: Type: ToExpr, R: Type]: ToExpr[Left[L, R]] with { + given LeftToExpr[L: Type: ToExpr, R: Type]: ToExpr[Left[L, R]] with def apply(x: Left[L, R])(using Quotes): Expr[Left[L, R]] = '{ Left[L, R](${Expr(x.value)}) } - } /** Default implementation of `ToExpr[Right[L, R]]` */ - given RightToExpr[L: Type, R: Type: ToExpr]: ToExpr[Right[L, R]] with { + given RightToExpr[L: Type, R: Type: ToExpr]: ToExpr[Right[L, R]] with def apply(x: Right[L, R])(using Quotes): Expr[Right[L, R]] = '{ Right[L, R](${Expr(x.value)}) } - } /** Default implementation of `ToExpr[EmptyTuple.type]` */ - given EmptyTupleToExpr: ToExpr[EmptyTuple.type] with { + given EmptyTupleToExpr: ToExpr[EmptyTuple.type] with def apply(tup: EmptyTuple.type)(using Quotes) = '{ EmptyTuple } - } /** Default implementation of `ToExpr[Tuple1[T1]]` */ - given Tuple1ToExpr[T1: Type: ToExpr]: ToExpr[Tuple1[T1]] with { + given Tuple1ToExpr[T1: Type: ToExpr]: ToExpr[Tuple1[T1]] with def apply(tup: Tuple1[T1])(using Quotes) = '{ Tuple1(${Expr(tup._1)}) } - } /** Default implementation of `ToExpr[Tuple2[T1, T2]]` */ - given Tuple2ToExpr[T1: Type: ToExpr, T2: Type: ToExpr]: ToExpr[Tuple2[T1, T2]] with { + given Tuple2ToExpr[T1: Type: ToExpr, T2: Type: ToExpr]: ToExpr[Tuple2[T1, T2]] with def apply(tup: Tuple2[T1, T2])(using Quotes) = '{ (${Expr(tup._1)}, ${Expr(tup._2)}) } - } /** Default implementation of `ToExpr[Tuple3[T1, T2, T3]]` */ - given Tuple3ToExpr[T1: Type: ToExpr, T2: Type: ToExpr, T3: Type: ToExpr]: ToExpr[Tuple3[T1, T2, T3]] with { + given Tuple3ToExpr[T1: Type: ToExpr, T2: Type: ToExpr, T3: Type: ToExpr]: ToExpr[Tuple3[T1, T2, T3]] with def apply(tup: Tuple3[T1, T2, T3])(using Quotes) = '{ (${Expr(tup._1)}, ${Expr(tup._2)}, ${Expr(tup._3)}) } - } /** Default implementation of `ToExpr[Tuple4[T1, T2, T3, T4]]` */ - given Tuple4ToExpr[T1: Type: 
ToExpr, T2: Type: ToExpr, T3: Type: ToExpr, T4: Type: ToExpr]: ToExpr[Tuple4[T1, T2, T3, T4]] with { + given Tuple4ToExpr[T1: Type: ToExpr, T2: Type: ToExpr, T3: Type: ToExpr, T4: Type: ToExpr]: ToExpr[Tuple4[T1, T2, T3, T4]] with def apply(tup: Tuple4[T1, T2, T3, T4])(using Quotes) = '{ (${Expr(tup._1)}, ${Expr(tup._2)}, ${Expr(tup._3)}, ${Expr(tup._4)}) } - } /** Default implementation of `ToExpr[Tuple5[T1, T2, T3, T4, T5]]` */ - given Tuple5ToExpr[T1: Type: ToExpr, T2: Type: ToExpr, T3: Type: ToExpr, T4: Type: ToExpr, T5: Type: ToExpr]: ToExpr[Tuple5[T1, T2, T3, T4, T5]] with { - def apply(tup: Tuple5[T1, T2, T3, T4, T5])(using Quotes) = { + given Tuple5ToExpr[T1: Type: ToExpr, T2: Type: ToExpr, T3: Type: ToExpr, T4: Type: ToExpr, T5: Type: ToExpr]: ToExpr[Tuple5[T1, T2, T3, T4, T5]] with + def apply(tup: Tuple5[T1, T2, T3, T4, T5])(using Quotes) = val (x1, x2, x3, x4, x5) = tup '{ (${Expr(x1)}, ${Expr(x2)}, ${Expr(x3)}, ${Expr(x4)}, ${Expr(x5)}) } - } - } /** Default implementation of `ToExpr[Tuple6[T1, T2, T3, T4, T5, T6]]` */ - given Tuple6ToExpr[T1: Type: ToExpr, T2: Type: ToExpr, T3: Type: ToExpr, T4: Type: ToExpr, T5: Type: ToExpr, T6: Type: ToExpr]: ToExpr[Tuple6[T1, T2, T3, T4, T5, T6]] with { - def apply(tup: Tuple6[T1, T2, T3, T4, T5, T6])(using Quotes) = { + given Tuple6ToExpr[T1: Type: ToExpr, T2: Type: ToExpr, T3: Type: ToExpr, T4: Type: ToExpr, T5: Type: ToExpr, T6: Type: ToExpr]: ToExpr[Tuple6[T1, T2, T3, T4, T5, T6]] with + def apply(tup: Tuple6[T1, T2, T3, T4, T5, T6])(using Quotes) = val (x1, x2, x3, x4, x5, x6) = tup '{ (${Expr(x1)}, ${Expr(x2)}, ${Expr(x3)}, ${Expr(x4)}, ${Expr(x5)}, ${Expr(x6)}) } - } - } /** Default implementation of `ToExpr[Tuple7[T1, T2, T3, T4, T5, T6, T7]]` */ - given Tuple7ToExpr[T1: Type: ToExpr, T2: Type: ToExpr, T3: Type: ToExpr, T4: Type: ToExpr, T5: Type: ToExpr, T6: Type: ToExpr, T7: Type: ToExpr]: ToExpr[Tuple7[T1, T2, T3, T4, T5, T6, T7]] with { - def apply(tup: Tuple7[T1, T2, T3, T4, T5, T6, T7])(using Quotes) 
= { + given Tuple7ToExpr[T1: Type: ToExpr, T2: Type: ToExpr, T3: Type: ToExpr, T4: Type: ToExpr, T5: Type: ToExpr, T6: Type: ToExpr, T7: Type: ToExpr]: ToExpr[Tuple7[T1, T2, T3, T4, T5, T6, T7]] with + def apply(tup: Tuple7[T1, T2, T3, T4, T5, T6, T7])(using Quotes) = val (x1, x2, x3, x4, x5, x6, x7) = tup '{ (${Expr(x1)}, ${Expr(x2)}, ${Expr(x3)}, ${Expr(x4)}, ${Expr(x5)}, ${Expr(x6)}, ${Expr(x7)}) } - } - } /** Default implementation of `ToExpr[Tuple8[T1, T2, T3, T4, T5, T6, T7, T8]]` */ - given Tuple8ToExpr[T1: Type: ToExpr, T2: Type: ToExpr, T3: Type: ToExpr, T4: Type: ToExpr, T5: Type: ToExpr, T6: Type: ToExpr, T7: Type: ToExpr, T8: Type: ToExpr]: ToExpr[Tuple8[T1, T2, T3, T4, T5, T6, T7, T8]] with { - def apply(tup: Tuple8[T1, T2, T3, T4, T5, T6, T7, T8])(using Quotes) = { + given Tuple8ToExpr[T1: Type: ToExpr, T2: Type: ToExpr, T3: Type: ToExpr, T4: Type: ToExpr, T5: Type: ToExpr, T6: Type: ToExpr, T7: Type: ToExpr, T8: Type: ToExpr]: ToExpr[Tuple8[T1, T2, T3, T4, T5, T6, T7, T8]] with + def apply(tup: Tuple8[T1, T2, T3, T4, T5, T6, T7, T8])(using Quotes) = val (x1, x2, x3, x4, x5, x6, x7, x8) = tup '{ (${Expr(x1)}, ${Expr(x2)}, ${Expr(x3)}, ${Expr(x4)}, ${Expr(x5)}, ${Expr(x6)}, ${Expr(x7)}, ${Expr(x8)}) } - } - } /** Default implementation of `ToExpr[Tuple9[T1, T2, T3, T4, T5, T6, T7, T8, T9]]` */ - given Tuple9ToExpr[T1: Type: ToExpr, T2: Type: ToExpr, T3: Type: ToExpr, T4: Type: ToExpr, T5: Type: ToExpr, T6: Type: ToExpr, T7: Type: ToExpr, T8: Type: ToExpr, T9: Type: ToExpr]: ToExpr[Tuple9[T1, T2, T3, T4, T5, T6, T7, T8, T9]] with { - def apply(tup: Tuple9[T1, T2, T3, T4, T5, T6, T7, T8, T9])(using Quotes) = { + given Tuple9ToExpr[T1: Type: ToExpr, T2: Type: ToExpr, T3: Type: ToExpr, T4: Type: ToExpr, T5: Type: ToExpr, T6: Type: ToExpr, T7: Type: ToExpr, T8: Type: ToExpr, T9: Type: ToExpr]: ToExpr[Tuple9[T1, T2, T3, T4, T5, T6, T7, T8, T9]] with + def apply(tup: Tuple9[T1, T2, T3, T4, T5, T6, T7, T8, T9])(using Quotes) = val (x1, x2, x3, x4, x5, x6, x7, 
x8, x9) = tup '{ (${Expr(x1)}, ${Expr(x2)}, ${Expr(x3)}, ${Expr(x4)}, ${Expr(x5)}, ${Expr(x6)}, ${Expr(x7)}, ${Expr(x8)}, ${Expr(x9)}) } - } - } /** Default implementation of `ToExpr[Tuple10[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10]]` */ - given Tuple10ToExpr[T1: Type: ToExpr, T2: Type: ToExpr, T3: Type: ToExpr, T4: Type: ToExpr, T5: Type: ToExpr, T6: Type: ToExpr, T7: Type: ToExpr, T8: Type: ToExpr, T9: Type: ToExpr, T10: Type: ToExpr]: ToExpr[Tuple10[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10]] with { - def apply(tup: Tuple10[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10])(using Quotes) = { + given Tuple10ToExpr[T1: Type: ToExpr, T2: Type: ToExpr, T3: Type: ToExpr, T4: Type: ToExpr, T5: Type: ToExpr, T6: Type: ToExpr, T7: Type: ToExpr, T8: Type: ToExpr, T9: Type: ToExpr, T10: Type: ToExpr]: ToExpr[Tuple10[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10]] with + def apply(tup: Tuple10[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10])(using Quotes) = val (x1, x2, x3, x4, x5, x6, x7, x8, x9, x10) = tup '{ (${Expr(x1)}, ${Expr(x2)}, ${Expr(x3)}, ${Expr(x4)}, ${Expr(x5)}, ${Expr(x6)}, ${Expr(x7)}, ${Expr(x8)}, ${Expr(x9)}, ${Expr(x10)}) } - } - } /** Default implementation of `ToExpr[Tuple11[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11]]` */ - given Tuple11ToExpr[T1: Type: ToExpr, T2: Type: ToExpr, T3: Type: ToExpr, T4: Type: ToExpr, T5: Type: ToExpr, T6: Type: ToExpr, T7: Type: ToExpr, T8: Type: ToExpr, T9: Type: ToExpr, T10: Type: ToExpr, T11: Type: ToExpr]: ToExpr[Tuple11[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11]] with { - def apply(tup: Tuple11[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11])(using Quotes) = { + given Tuple11ToExpr[T1: Type: ToExpr, T2: Type: ToExpr, T3: Type: ToExpr, T4: Type: ToExpr, T5: Type: ToExpr, T6: Type: ToExpr, T7: Type: ToExpr, T8: Type: ToExpr, T9: Type: ToExpr, T10: Type: ToExpr, T11: Type: ToExpr]: ToExpr[Tuple11[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11]] with + def apply(tup: Tuple11[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11])(using Quotes) = val 
(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11) = tup '{ (${Expr(x1)}, ${Expr(x2)}, ${Expr(x3)}, ${Expr(x4)}, ${Expr(x5)}, ${Expr(x6)}, ${Expr(x7)}, ${Expr(x8)}, ${Expr(x9)}, ${Expr(x10)}, ${Expr(x11)}) } - } - } /** Default implementation of `ToExpr[Tuple12[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12]]` */ - given Tuple12ToExpr[T1: Type: ToExpr, T2: Type: ToExpr, T3: Type: ToExpr, T4: Type: ToExpr, T5: Type: ToExpr, T6: Type: ToExpr, T7: Type: ToExpr, T8: Type: ToExpr, T9: Type: ToExpr, T10: Type: ToExpr, T11: Type: ToExpr, T12: Type: ToExpr]: ToExpr[Tuple12[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12]] with { - def apply(tup: Tuple12[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12])(using Quotes) = { + given Tuple12ToExpr[T1: Type: ToExpr, T2: Type: ToExpr, T3: Type: ToExpr, T4: Type: ToExpr, T5: Type: ToExpr, T6: Type: ToExpr, T7: Type: ToExpr, T8: Type: ToExpr, T9: Type: ToExpr, T10: Type: ToExpr, T11: Type: ToExpr, T12: Type: ToExpr]: ToExpr[Tuple12[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12]] with + def apply(tup: Tuple12[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12])(using Quotes) = val (x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12) = tup '{ (${Expr(x1)}, ${Expr(x2)}, ${Expr(x3)}, ${Expr(x4)}, ${Expr(x5)}, ${Expr(x6)}, ${Expr(x7)}, ${Expr(x8)}, ${Expr(x9)}, ${Expr(x10)}, ${Expr(x11)}, ${Expr(x12)}) } - } - } /** Default implementation of `ToExpr[Tuple13[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13]]` */ - given Tuple13ToExpr[T1: Type: ToExpr, T2: Type: ToExpr, T3: Type: ToExpr, T4: Type: ToExpr, T5: Type: ToExpr, T6: Type: ToExpr, T7: Type: ToExpr, T8: Type: ToExpr, T9: Type: ToExpr, T10: Type: ToExpr, T11: Type: ToExpr, T12: Type: ToExpr, T13: Type: ToExpr]: ToExpr[Tuple13[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13]] with { - def apply(tup: Tuple13[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13])(using Quotes) = { + given Tuple13ToExpr[T1: Type: ToExpr, T2: Type: ToExpr, T3: Type: ToExpr, T4: Type: 
ToExpr, T5: Type: ToExpr, T6: Type: ToExpr, T7: Type: ToExpr, T8: Type: ToExpr, T9: Type: ToExpr, T10: Type: ToExpr, T11: Type: ToExpr, T12: Type: ToExpr, T13: Type: ToExpr]: ToExpr[Tuple13[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13]] with + def apply(tup: Tuple13[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13])(using Quotes) = val (x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13) = tup '{ (${Expr(x1)}, ${Expr(x2)}, ${Expr(x3)}, ${Expr(x4)}, ${Expr(x5)}, ${Expr(x6)}, ${Expr(x7)}, ${Expr(x8)}, ${Expr(x9)}, ${Expr(x10)}, ${Expr(x11)}, ${Expr(x12)}, ${Expr(x13)}) } - } - } /** Default implementation of `ToExpr[Tuple14[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14]]` */ - given Tuple14ToExpr[T1: Type: ToExpr, T2: Type: ToExpr, T3: Type: ToExpr, T4: Type: ToExpr, T5: Type: ToExpr, T6: Type: ToExpr, T7: Type: ToExpr, T8: Type: ToExpr, T9: Type: ToExpr, T10: Type: ToExpr, T11: Type: ToExpr, T12: Type: ToExpr, T13: Type: ToExpr, T14: Type: ToExpr]: ToExpr[Tuple14[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14]] with { - def apply(tup: Tuple14[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14])(using Quotes) = { + given Tuple14ToExpr[T1: Type: ToExpr, T2: Type: ToExpr, T3: Type: ToExpr, T4: Type: ToExpr, T5: Type: ToExpr, T6: Type: ToExpr, T7: Type: ToExpr, T8: Type: ToExpr, T9: Type: ToExpr, T10: Type: ToExpr, T11: Type: ToExpr, T12: Type: ToExpr, T13: Type: ToExpr, T14: Type: ToExpr]: ToExpr[Tuple14[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14]] with + def apply(tup: Tuple14[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14])(using Quotes) = val (x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14) = tup '{ (${Expr(x1)}, ${Expr(x2)}, ${Expr(x3)}, ${Expr(x4)}, ${Expr(x5)}, ${Expr(x6)}, ${Expr(x7)}, ${Expr(x8)}, ${Expr(x9)}, ${Expr(x10)}, ${Expr(x11)}, ${Expr(x12)}, ${Expr(x13)}, ${Expr(x14)}) } - } - } /** Default implementation of `ToExpr[Tuple15[T1, T2, T3, T4, T5, T6, T7, 
T8, T9, T10, T11, T12, T13, T14, T15]]` */ - given Tuple15ToExpr[T1: Type: ToExpr, T2: Type: ToExpr, T3: Type: ToExpr, T4: Type: ToExpr, T5: Type: ToExpr, T6: Type: ToExpr, T7: Type: ToExpr, T8: Type: ToExpr, T9: Type: ToExpr, T10: Type: ToExpr, T11: Type: ToExpr, T12: Type: ToExpr, T13: Type: ToExpr, T14: Type: ToExpr, T15: Type: ToExpr]: ToExpr[Tuple15[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15]] with { - def apply(tup: Tuple15[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15])(using Quotes) = { + given Tuple15ToExpr[T1: Type: ToExpr, T2: Type: ToExpr, T3: Type: ToExpr, T4: Type: ToExpr, T5: Type: ToExpr, T6: Type: ToExpr, T7: Type: ToExpr, T8: Type: ToExpr, T9: Type: ToExpr, T10: Type: ToExpr, T11: Type: ToExpr, T12: Type: ToExpr, T13: Type: ToExpr, T14: Type: ToExpr, T15: Type: ToExpr]: ToExpr[Tuple15[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15]] with + def apply(tup: Tuple15[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15])(using Quotes) = val (x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15) = tup '{ (${Expr(x1)}, ${Expr(x2)}, ${Expr(x3)}, ${Expr(x4)}, ${Expr(x5)}, ${Expr(x6)}, ${Expr(x7)}, ${Expr(x8)}, ${Expr(x9)}, ${Expr(x10)}, ${Expr(x11)}, ${Expr(x12)}, ${Expr(x13)}, ${Expr(x14)}, ${Expr(x15)}) } - } - } /** Default implementation of `ToExpr[Tuple16[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16]]` */ - given Tuple16ToExpr[T1: Type: ToExpr, T2: Type: ToExpr, T3: Type: ToExpr, T4: Type: ToExpr, T5: Type: ToExpr, T6: Type: ToExpr, T7: Type: ToExpr, T8: Type: ToExpr, T9: Type: ToExpr, T10: Type: ToExpr, T11: Type: ToExpr, T12: Type: ToExpr, T13: Type: ToExpr, T14: Type: ToExpr, T15: Type: ToExpr, T16: Type: ToExpr]: ToExpr[Tuple16[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16]] with { - def apply(tup: Tuple16[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16])(using Quotes) = { + given 
Tuple16ToExpr[T1: Type: ToExpr, T2: Type: ToExpr, T3: Type: ToExpr, T4: Type: ToExpr, T5: Type: ToExpr, T6: Type: ToExpr, T7: Type: ToExpr, T8: Type: ToExpr, T9: Type: ToExpr, T10: Type: ToExpr, T11: Type: ToExpr, T12: Type: ToExpr, T13: Type: ToExpr, T14: Type: ToExpr, T15: Type: ToExpr, T16: Type: ToExpr]: ToExpr[Tuple16[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16]] with + def apply(tup: Tuple16[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16])(using Quotes) = val (x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16) = tup '{ (${Expr(x1)}, ${Expr(x2)}, ${Expr(x3)}, ${Expr(x4)}, ${Expr(x5)}, ${Expr(x6)}, ${Expr(x7)}, ${Expr(x8)}, ${Expr(x9)}, ${Expr(x10)}, ${Expr(x11)}, ${Expr(x12)}, ${Expr(x13)}, ${Expr(x14)}, ${Expr(x15)}, ${Expr(x16)}) } - } - } /** Default implementation of `ToExpr[Tuple17[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17]]` */ - given Tuple17ToExpr[T1: Type: ToExpr, T2: Type: ToExpr, T3: Type: ToExpr, T4: Type: ToExpr, T5: Type: ToExpr, T6: Type: ToExpr, T7: Type: ToExpr, T8: Type: ToExpr, T9: Type: ToExpr, T10: Type: ToExpr, T11: Type: ToExpr, T12: Type: ToExpr, T13: Type: ToExpr, T14: Type: ToExpr, T15: Type: ToExpr, T16: Type: ToExpr, T17: Type: ToExpr]: ToExpr[Tuple17[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17]] with { - def apply(tup: Tuple17[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17])(using Quotes) = { + given Tuple17ToExpr[T1: Type: ToExpr, T2: Type: ToExpr, T3: Type: ToExpr, T4: Type: ToExpr, T5: Type: ToExpr, T6: Type: ToExpr, T7: Type: ToExpr, T8: Type: ToExpr, T9: Type: ToExpr, T10: Type: ToExpr, T11: Type: ToExpr, T12: Type: ToExpr, T13: Type: ToExpr, T14: Type: ToExpr, T15: Type: ToExpr, T16: Type: ToExpr, T17: Type: ToExpr]: ToExpr[Tuple17[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17]] with + def apply(tup: Tuple17[T1, T2, T3, T4, T5, T6, T7, T8, 
T9, T10, T11, T12, T13, T14, T15, T16, T17])(using Quotes) = val (x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17) = tup '{ (${Expr(x1)}, ${Expr(x2)}, ${Expr(x3)}, ${Expr(x4)}, ${Expr(x5)}, ${Expr(x6)}, ${Expr(x7)}, ${Expr(x8)}, ${Expr(x9)}, ${Expr(x10)}, ${Expr(x11)}, ${Expr(x12)}, ${Expr(x13)}, ${Expr(x14)}, ${Expr(x15)}, ${Expr(x16)}, ${Expr(x17)}) } - } - } /** Default implementation of `ToExpr[Tuple18[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18]]` */ - given Tuple18ToExpr[T1: Type: ToExpr, T2: Type: ToExpr, T3: Type: ToExpr, T4: Type: ToExpr, T5: Type: ToExpr, T6: Type: ToExpr, T7: Type: ToExpr, T8: Type: ToExpr, T9: Type: ToExpr, T10: Type: ToExpr, T11: Type: ToExpr, T12: Type: ToExpr, T13: Type: ToExpr, T14: Type: ToExpr, T15: Type: ToExpr, T16: Type: ToExpr, T17: Type: ToExpr, T18: Type: ToExpr]: ToExpr[Tuple18[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18]] with { - def apply(tup: Tuple18[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18])(using Quotes) = { + given Tuple18ToExpr[T1: Type: ToExpr, T2: Type: ToExpr, T3: Type: ToExpr, T4: Type: ToExpr, T5: Type: ToExpr, T6: Type: ToExpr, T7: Type: ToExpr, T8: Type: ToExpr, T9: Type: ToExpr, T10: Type: ToExpr, T11: Type: ToExpr, T12: Type: ToExpr, T13: Type: ToExpr, T14: Type: ToExpr, T15: Type: ToExpr, T16: Type: ToExpr, T17: Type: ToExpr, T18: Type: ToExpr]: ToExpr[Tuple18[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18]] with + def apply(tup: Tuple18[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18])(using Quotes) = val (x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18) = tup '{ (${Expr(x1)}, ${Expr(x2)}, ${Expr(x3)}, ${Expr(x4)}, ${Expr(x5)}, ${Expr(x6)}, ${Expr(x7)}, ${Expr(x8)}, ${Expr(x9)}, ${Expr(x10)}, ${Expr(x11)}, ${Expr(x12)}, ${Expr(x13)}, ${Expr(x14)}, ${Expr(x15)}, ${Expr(x16)}, 
${Expr(x17)}, ${Expr(x18)}) } - } - } /** Default implementation of `ToExpr[Tuple19[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19]]` */ - given Tuple19ToExpr[T1: Type: ToExpr, T2: Type: ToExpr, T3: Type: ToExpr, T4: Type: ToExpr, T5: Type: ToExpr, T6: Type: ToExpr, T7: Type: ToExpr, T8: Type: ToExpr, T9: Type: ToExpr, T10: Type: ToExpr, T11: Type: ToExpr, T12: Type: ToExpr, T13: Type: ToExpr, T14: Type: ToExpr, T15: Type: ToExpr, T16: Type: ToExpr, T17: Type: ToExpr, T18: Type: ToExpr, T19: Type: ToExpr]: ToExpr[Tuple19[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19]] with { - def apply(tup: Tuple19[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19])(using Quotes) = { + given Tuple19ToExpr[T1: Type: ToExpr, T2: Type: ToExpr, T3: Type: ToExpr, T4: Type: ToExpr, T5: Type: ToExpr, T6: Type: ToExpr, T7: Type: ToExpr, T8: Type: ToExpr, T9: Type: ToExpr, T10: Type: ToExpr, T11: Type: ToExpr, T12: Type: ToExpr, T13: Type: ToExpr, T14: Type: ToExpr, T15: Type: ToExpr, T16: Type: ToExpr, T17: Type: ToExpr, T18: Type: ToExpr, T19: Type: ToExpr]: ToExpr[Tuple19[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19]] with + def apply(tup: Tuple19[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19])(using Quotes) = val (x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19) = tup '{ (${Expr(x1)}, ${Expr(x2)}, ${Expr(x3)}, ${Expr(x4)}, ${Expr(x5)}, ${Expr(x6)}, ${Expr(x7)}, ${Expr(x8)}, ${Expr(x9)}, ${Expr(x10)}, ${Expr(x11)}, ${Expr(x12)}, ${Expr(x13)}, ${Expr(x14)}, ${Expr(x15)}, ${Expr(x16)}, ${Expr(x17)}, ${Expr(x18)}, ${Expr(x19)}) } - } - } /** Default implementation of `ToExpr[Tuple20[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20]]` */ - given Tuple20ToExpr[T1: Type: ToExpr, T2: Type: ToExpr, T3: Type: ToExpr, T4: Type: 
ToExpr, T5: Type: ToExpr, T6: Type: ToExpr, T7: Type: ToExpr, T8: Type: ToExpr, T9: Type: ToExpr, T10: Type: ToExpr, T11: Type: ToExpr, T12: Type: ToExpr, T13: Type: ToExpr, T14: Type: ToExpr, T15: Type: ToExpr, T16: Type: ToExpr, T17: Type: ToExpr, T18: Type: ToExpr, T19: Type: ToExpr, T20: Type: ToExpr]: ToExpr[Tuple20[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20]] with { - def apply(tup: Tuple20[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20])(using Quotes) = { + given Tuple20ToExpr[T1: Type: ToExpr, T2: Type: ToExpr, T3: Type: ToExpr, T4: Type: ToExpr, T5: Type: ToExpr, T6: Type: ToExpr, T7: Type: ToExpr, T8: Type: ToExpr, T9: Type: ToExpr, T10: Type: ToExpr, T11: Type: ToExpr, T12: Type: ToExpr, T13: Type: ToExpr, T14: Type: ToExpr, T15: Type: ToExpr, T16: Type: ToExpr, T17: Type: ToExpr, T18: Type: ToExpr, T19: Type: ToExpr, T20: Type: ToExpr]: ToExpr[Tuple20[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20]] with + def apply(tup: Tuple20[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20])(using Quotes) = val (x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20) = tup '{ (${Expr(x1)}, ${Expr(x2)}, ${Expr(x3)}, ${Expr(x4)}, ${Expr(x5)}, ${Expr(x6)}, ${Expr(x7)}, ${Expr(x8)}, ${Expr(x9)}, ${Expr(x10)}, ${Expr(x11)}, ${Expr(x12)}, ${Expr(x13)}, ${Expr(x14)}, ${Expr(x15)}, ${Expr(x16)}, ${Expr(x17)}, ${Expr(x18)}, ${Expr(x19)}, ${Expr(x20)}) } - } - } /** Default implementation of `ToExpr[Tuple21[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21]]` */ - given Tuple21ToExpr[T1: Type: ToExpr, T2: Type: ToExpr, T3: Type: ToExpr, T4: Type: ToExpr, T5: Type: ToExpr, T6: Type: ToExpr, T7: Type: ToExpr, T8: Type: ToExpr, T9: Type: ToExpr, T10: Type: ToExpr, T11: Type: ToExpr, T12: Type: ToExpr, T13: Type: ToExpr, T14: 
Type: ToExpr, T15: Type: ToExpr, T16: Type: ToExpr, T17: Type: ToExpr, T18: Type: ToExpr, T19: Type: ToExpr, T20: Type: ToExpr, T21: Type: ToExpr]: ToExpr[Tuple21[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21]] with { - def apply(tup: Tuple21[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21])(using Quotes) = { + given Tuple21ToExpr[T1: Type: ToExpr, T2: Type: ToExpr, T3: Type: ToExpr, T4: Type: ToExpr, T5: Type: ToExpr, T6: Type: ToExpr, T7: Type: ToExpr, T8: Type: ToExpr, T9: Type: ToExpr, T10: Type: ToExpr, T11: Type: ToExpr, T12: Type: ToExpr, T13: Type: ToExpr, T14: Type: ToExpr, T15: Type: ToExpr, T16: Type: ToExpr, T17: Type: ToExpr, T18: Type: ToExpr, T19: Type: ToExpr, T20: Type: ToExpr, T21: Type: ToExpr]: ToExpr[Tuple21[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21]] with + def apply(tup: Tuple21[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21])(using Quotes) = val (x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20, x21) = tup '{ (${Expr(x1)}, ${Expr(x2)}, ${Expr(x3)}, ${Expr(x4)}, ${Expr(x5)}, ${Expr(x6)}, ${Expr(x7)}, ${Expr(x8)}, ${Expr(x9)}, ${Expr(x10)}, ${Expr(x11)}, ${Expr(x12)}, ${Expr(x13)}, ${Expr(x14)}, ${Expr(x15)}, ${Expr(x16)}, ${Expr(x17)}, ${Expr(x18)}, ${Expr(x19)}, ${Expr(x20)}, ${Expr(x21)}) } - } - } /** Default implementation of `ToExpr[Tuple22[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22]]` */ - given Tuple22ToExpr[T1: Type: ToExpr, T2: Type: ToExpr, T3: Type: ToExpr, T4: Type: ToExpr, T5: Type: ToExpr, T6: Type: ToExpr, T7: Type: ToExpr, T8: Type: ToExpr, T9: Type: ToExpr, T10: Type: ToExpr, T11: Type: ToExpr, T12: Type: ToExpr, T13: Type: ToExpr, T14: Type: ToExpr, T15: Type: ToExpr, T16: Type: ToExpr, T17: Type: ToExpr, T18: Type: ToExpr, T19: Type: 
ToExpr, T20: Type: ToExpr, T21: Type: ToExpr, T22: Type: ToExpr]: ToExpr[Tuple22[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22]] with { - def apply(tup: Tuple22[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22])(using Quotes) = { + given Tuple22ToExpr[T1: Type: ToExpr, T2: Type: ToExpr, T3: Type: ToExpr, T4: Type: ToExpr, T5: Type: ToExpr, T6: Type: ToExpr, T7: Type: ToExpr, T8: Type: ToExpr, T9: Type: ToExpr, T10: Type: ToExpr, T11: Type: ToExpr, T12: Type: ToExpr, T13: Type: ToExpr, T14: Type: ToExpr, T15: Type: ToExpr, T16: Type: ToExpr, T17: Type: ToExpr, T18: Type: ToExpr, T19: Type: ToExpr, T20: Type: ToExpr, T21: Type: ToExpr, T22: Type: ToExpr]: ToExpr[Tuple22[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22]] with + def apply(tup: Tuple22[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22])(using Quotes) = val (x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20, x21, x22) = tup '{ (${Expr(x1)}, ${Expr(x2)}, ${Expr(x3)}, ${Expr(x4)}, ${Expr(x5)}, ${Expr(x6)}, ${Expr(x7)}, ${Expr(x8)}, ${Expr(x9)}, ${Expr(x10)}, ${Expr(x11)}, ${Expr(x12)}, ${Expr(x13)}, ${Expr(x14)}, ${Expr(x15)}, ${Expr(x16)}, ${Expr(x17)}, ${Expr(x18)}, ${Expr(x19)}, ${Expr(x20)}, ${Expr(x21)}, ${Expr(x22)}) } - } - } /** Default implementation of `ToExpr[H *: T]` */ - given TupleConsToExpr [H: Type: ToExpr, T <: Tuple: Type: ToExpr]: ToExpr[H *: T] with { + given TupleConsToExpr [H: Type: ToExpr, T <: Tuple: Type: ToExpr]: ToExpr[H *: T] with def apply(tup: H *: T)(using Quotes): Expr[H *: T] = val head = Expr[H](tup.head) val tail = Expr[T](tup.tail) '{ $head *: $tail } - } /** Default implementation of `ToExpr[BigInt]` */ - given BigIntToExpr: ToExpr[BigInt] with { + given BigIntToExpr: ToExpr[BigInt] with def apply(x: BigInt)(using Quotes): 
Expr[BigInt] = '{ BigInt(${Expr(x.toByteArray)}) } - } /** Default implementation of `ToExpr[BigDecimal using the default MathContext]` */ - given BigDecimalToExpr: ToExpr[BigDecimal] with { + given BigDecimalToExpr: ToExpr[BigDecimal] with def apply(x: BigDecimal)(using Quotes): Expr[BigDecimal] = '{ BigDecimal(${Expr(x.toString)}) } - } /** Default implementation of `ToExpr[StringContext]` */ - given StringContextToExpr: ToExpr[StringContext] with { + given StringContextToExpr: ToExpr[StringContext] with def apply(stringContext: StringContext)(using Quotes): Expr[StringContext] = val parts = Varargs(stringContext.parts.map(Expr(_))) '{ StringContext($parts: _*) } - } -} diff --git a/library/src/scala/quoted/Type.scala b/library/src/scala/quoted/Type.scala index b035bdd6e52f..738a0eb4d313 100644 --- a/library/src/scala/quoted/Type.scala +++ b/library/src/scala/quoted/Type.scala @@ -75,11 +75,10 @@ object Type: def rec(tpe: TypeRepr): Option[Tuple] = tpe.widenTermRefByName.dealias match case AppliedType(fn, tpes) if defn.isTupleClass(fn.typeSymbol) => - tpes.foldRight(Option[Tuple](EmptyTuple)) { + tpes.foldRight(Option[Tuple](EmptyTuple)): case (_, None) => None case (ValueOf(v), Some(acc)) => Some(v *: acc) case _ => None - } case AppliedType(tp, List(ValueOf(headValue), tail)) if tp.derivesFrom(cons) => rec(tail) match case Some(tailValue) => Some(headValue *: tailValue) diff --git a/library/src/scala/quoted/Varargs.scala b/library/src/scala/quoted/Varargs.scala index e2e74c3879c6..133621ccb8fb 100644 --- a/library/src/scala/quoted/Varargs.scala +++ b/library/src/scala/quoted/Varargs.scala @@ -5,7 +5,7 @@ package scala.quoted * `Varargs` can be used to create the an expression `args` that will be used as varargs `'{ f($args: _*) }` * or it can be used to extract all the arguments of the a varargs. 
*/ -object Varargs { +object Varargs: /** * Lifts this sequence of expressions into an expression of a sequence @@ -27,10 +27,9 @@ object Varargs { * //} * ``` */ - def apply[T](xs: Seq[Expr[T]])(using Type[T])(using Quotes): Expr[Seq[T]] = { + def apply[T](xs: Seq[Expr[T]])(using Type[T])(using Quotes): Expr[Seq[T]] = import quotes.reflect._ Repeated(xs.map(_.asTerm).toList, TypeTree.of[T]).asExpr.asInstanceOf[Expr[Seq[T]]] - } /** Matches a literal sequence of expressions and return a sequence of expressions. * @@ -42,16 +41,13 @@ object Varargs { * // argVarargs: Seq[Expr[Int]] * */ - def unapply[T](expr: Expr[Seq[T]])(using Quotes): Option[Seq[Expr[T]]] = { + def unapply[T](expr: Expr[Seq[T]])(using Quotes): Option[Seq[Expr[T]]] = import quotes.reflect._ - def rec(tree: Term): Option[Seq[Expr[T]]] = tree match { + def rec(tree: Term): Option[Seq[Expr[T]]] = tree match case Repeated(elems, _) => Some(elems.map(x => x.asExpr.asInstanceOf[Expr[T]])) case Typed(e, _) => rec(e) case Block(Nil, e) => rec(e) case Inlined(_, Nil, e) => rec(e) case _ => None - } rec(expr.asTerm) - } -} diff --git a/library/src/scala/quoted/runtime/Patterns.scala b/library/src/scala/quoted/runtime/Patterns.scala index 91ad23c62a98..5de8b6e6c635 100644 --- a/library/src/scala/quoted/runtime/Patterns.scala +++ b/library/src/scala/quoted/runtime/Patterns.scala @@ -3,7 +3,7 @@ package scala.quoted.runtime import scala.annotation.{Annotation, compileTimeOnly} @compileTimeOnly("Illegal reference to `scala.quoted.runtime.Patterns`") -object Patterns { +object Patterns: /** A splice in a quoted pattern is desugared by the compiler into a call to this method. 
* @@ -40,4 +40,3 @@ object Patterns { @compileTimeOnly("Illegal reference to `scala.quoted.runtime.Patterns.fromAbove`") class fromAbove extends Annotation -} diff --git a/library/src/scala/runtime/Arrays.scala b/library/src/scala/runtime/Arrays.scala index 2d98caea4df8..582ae6510a46 100644 --- a/library/src/scala/runtime/Arrays.scala +++ b/library/src/scala/runtime/Arrays.scala @@ -7,7 +7,7 @@ import java.lang.{reflect => jlr} /** All but the first two operations should be short-circuited and implemented specially by * the backend. */ -object Arrays { +object Arrays: // note: this class is magical. Do not touch it unless you know what you are doing.` @@ -18,14 +18,12 @@ object Arrays { tag.newArray(length) /** Convert a sequence to a Java array with element type given by `clazz`. */ - def seqToArray[T](xs: Seq[T], clazz: Class[_]): Array[T] = { + def seqToArray[T](xs: Seq[T], clazz: Class[_]): Array[T] = val arr = java.lang.reflect.Array.newInstance(clazz, xs.length).asInstanceOf[Array[T]] xs.copyToArray(arr) arr - } /** Create an array of a reference type T. */ def newArray[Arr](componentType: Class[_], returnType: Class[Arr], dimensions: Array[Int]): Arr = jlr.Array.newInstance(componentType, dimensions: _*).asInstanceOf[Arr] -} diff --git a/library/src/scala/runtime/FunctionXXL.scala b/library/src/scala/runtime/FunctionXXL.scala index 250e03577e08..0737212778c6 100644 --- a/library/src/scala/runtime/FunctionXXL.scala +++ b/library/src/scala/runtime/FunctionXXL.scala @@ -1,10 +1,9 @@ package scala.runtime /** A function with all parameters grouped in an array. */ -trait FunctionXXL { +trait FunctionXXL: /** Apply all parameters grouped in xs to this function. 
*/ def apply(xs: IArray[Object]): Object override def toString() = "" -} diff --git a/library/src/scala/runtime/LazyVals.scala b/library/src/scala/runtime/LazyVals.scala index 0edbe0e748f4..c4c5d246bacf 100644 --- a/library/src/scala/runtime/LazyVals.scala +++ b/library/src/scala/runtime/LazyVals.scala @@ -7,9 +7,9 @@ import scala.annotation.* /** * Helper methods used in thread-safe lazy vals. */ -object LazyVals { +object LazyVals: @nowarn - private[this] val unsafe: sun.misc.Unsafe = { + private[this] val unsafe: sun.misc.Unsafe = def throwInitializationException() = throw new ExceptionInInitializerError( new IllegalStateException("Can't find instance of sun.misc.Unsafe") @@ -23,22 +23,19 @@ object LazyVals { throwInitializationException() catch case _: NoSuchFieldException => throwInitializationException() - } - private[this] val base: Int = { + private[this] val base: Int = val processors = java.lang.Runtime.getRuntime.nn.availableProcessors() 8 * processors * processors - } private[this] val monitors: Array[Object] = Array.tabulate(base)(_ => new Object) - private def getMonitor(obj: Object, fieldId: Int = 0) = { + private def getMonitor(obj: Object, fieldId: Int = 0) = var id = (java.lang.System.identityHashCode(obj) + fieldId) % base if (id < 0) id += base monitors(id) - } private final val LAZY_VAL_MASK = 3L private final val debug = false @@ -68,88 +65,73 @@ object LazyVals { final val BITS_PER_LAZY_VAL = 2L - def STATE(cur: Long, ord: Int): Long = { + def STATE(cur: Long, ord: Int): Long = val r = (cur >> (ord * BITS_PER_LAZY_VAL)) & LAZY_VAL_MASK if (debug) println(s"STATE($cur, $ord) = $r") r - } - def CAS(t: Object, offset: Long, e: Long, v: Int, ord: Int): Boolean = { + def CAS(t: Object, offset: Long, e: Long, v: Int, ord: Int): Boolean = if (debug) println(s"CAS($t, $offset, $e, $v, $ord)") val mask = ~(LAZY_VAL_MASK << ord * BITS_PER_LAZY_VAL) val n = (e & mask) | (v.toLong << (ord * BITS_PER_LAZY_VAL)) unsafe.compareAndSwapLong(t, offset, e, n) - 
} - def objCAS(t: Object, offset: Long, exp: Object, n: Object): Boolean = { + def objCAS(t: Object, offset: Long, exp: Object, n: Object): Boolean = if (debug) println(s"objCAS($t, $exp, $n)") unsafe.compareAndSwapObject(t, offset, exp, n) - } - def setFlag(t: Object, offset: Long, v: Int, ord: Int): Unit = { + def setFlag(t: Object, offset: Long, v: Int, ord: Int): Unit = if (debug) println(s"setFlag($t, $offset, $v, $ord)") var retry = true - while (retry) { + while (retry) val cur = get(t, offset) if (STATE(cur, ord) == 1) retry = !CAS(t, offset, cur, v, ord) - else { + else // cur == 2, somebody is waiting on monitor - if (CAS(t, offset, cur, v, ord)) { + if (CAS(t, offset, cur, v, ord)) val monitor = getMonitor(t, ord) - monitor.synchronized { + monitor.synchronized: monitor.notifyAll() - } retry = false - } - } - } - } - def wait4Notification(t: Object, offset: Long, cur: Long, ord: Int): Unit = { + def wait4Notification(t: Object, offset: Long, cur: Long, ord: Int): Unit = if (debug) println(s"wait4Notification($t, $offset, $cur, $ord)") var retry = true - while (retry) { + while (retry) val cur = get(t, offset) val state = STATE(cur, ord) if (state == 1) CAS(t, offset, cur, 2, ord) - else if (state == 2) { + else if (state == 2) val monitor = getMonitor(t, ord) - monitor.synchronized { + monitor.synchronized: if (STATE(get(t, offset), ord) == 2) // make sure notification did not happen yet. 
monitor.wait() - } - } else retry = false - } - } - def get(t: Object, off: Long): Long = { + def get(t: Object, off: Long): Long = if (debug) println(s"get($t, $off)") unsafe.getLongVolatile(t, off) - } // kept for backward compatibility - def getOffset(clz: Class[_], name: String): Long = { + def getOffset(clz: Class[_], name: String): Long = @nowarn val r = unsafe.objectFieldOffset(clz.getDeclaredField(name)) if (debug) println(s"getOffset($clz, $name) = $r") r - } - def getStaticFieldOffset(field: java.lang.reflect.Field): Long = { + def getStaticFieldOffset(field: java.lang.reflect.Field): Long = @nowarn val r = unsafe.staticFieldOffset(field) if (debug) println(s"getStaticFieldOffset(${field.getDeclaringClass}, ${field.getName}) = $r") r - } def getOffsetStatic(field: java.lang.reflect.Field) = @nowarn @@ -159,12 +141,10 @@ object LazyVals { r - object Names { + object Names: final val state = "STATE" final val cas = "CAS" final val setFlag = "setFlag" final val wait4Notification = "wait4Notification" final val get = "get" final val getOffset = "getOffset" - } -} diff --git a/library/src/scala/runtime/TupleXXL.scala b/library/src/scala/runtime/TupleXXL.scala index 22c3cc521427..08f6530e689e 100644 --- a/library/src/scala/runtime/TupleXXL.scala +++ b/library/src/scala/runtime/TupleXXL.scala @@ -1,6 +1,6 @@ package scala.runtime -final class TupleXXL private (es: IArray[Object]) extends Product { +final class TupleXXL private (es: IArray[Object]) extends Product: assert(es.length > 22) def productElement(n: Int): Any = es(n) @@ -13,36 +13,30 @@ final class TupleXXL private (es: IArray[Object]) extends Product { override def hashCode: Int = scala.runtime.ScalaRunTime._hashCode(this) - override def canEqual(that: Any): Boolean = that match { + override def canEqual(that: Any): Boolean = that match case that: TupleXXL => that.productArity == this.productArity case _ => false - } - override def equals(that: Any): Boolean = that match { + override def equals(that: 
Any): Boolean = that match case that: TupleXXL => - es.asInstanceOf[AnyRef].eq(that.elems.asInstanceOf[AnyRef]) || { + es.asInstanceOf[AnyRef].eq(that.elems.asInstanceOf[AnyRef]) `||`: if es.length != that.elems.length then return false var i = 0 while i < es.length do if es(i) != that.elems(i) then return false i += 1 true - } case _ => false - } def elems: IArray[Object] = es - def tailXXL: TupleXXL = { + def tailXXL: TupleXXL = assert(es.length > 23) new TupleXXL(es.asInstanceOf[Array[Object]].tail.asInstanceOf[IArray[Object]]) // TODO use IArray.tail - } def toArray: Array[Object] = es.asInstanceOf[Array[Object]].clone // TODO use IArray.toArray -} -object TupleXXL { +object TupleXXL: def fromIterator(elems: Iterator[Any]): TupleXXL = new TupleXXL(elems.map(_.asInstanceOf[Object]).toArray.asInstanceOf[IArray[Object]]) // TODO use Iterator.toIArray def fromIArray(elems: IArray[Object]): TupleXXL = new TupleXXL(elems) def apply(elems: Any*): TupleXXL = new TupleXXL(IArray(elems.asInstanceOf[Seq[AnyRef]]: _*)) def unapplySeq(x: TupleXXL): Option[Seq[Any]] = Some(x.elems.asInstanceOf[Array[Object]].toSeq) // TODO use IArray.toSeq -} diff --git a/library/src/scala/runtime/Tuples.scala b/library/src/scala/runtime/Tuples.scala index c16d8c2a201a..05d509db18b6 100644 --- a/library/src/scala/runtime/Tuples.scala +++ b/library/src/scala/runtime/Tuples.scala @@ -1,32 +1,28 @@ package scala.runtime -object Tuples { +object Tuples: inline val MaxSpecialized = 22 - def toArray(self: Tuple): Array[Object] = (self: Any) match { + def toArray(self: Tuple): Array[Object] = (self: Any) match case EmptyTuple => Array.emptyObjectArray case self: TupleXXL => self.toArray case self: Product => productToArray(self) - } - def toIArray(self: Tuple): IArray[Object] = (self: Any) match { + def toIArray(self: Tuple): IArray[Object] = (self: Any) match case EmptyTuple => Array.emptyObjectArray.asInstanceOf[IArray[Object]] case self: TupleXXL => self.elems case self: Product => 
productToArray(self).asInstanceOf[IArray[Object]] - } - def productToArray(self: Product): Array[Object] = { + def productToArray(self: Product): Array[Object] = val arr = new Array[Object](self.productArity) var i = 0 - while (i < arr.length) { + while (i < arr.length) arr(i) = self.productElement(i).asInstanceOf[Object] i += 1 - } arr - } - def fromArray(xs: Array[Object]): Tuple = xs.length match { + def fromArray(xs: Array[Object]): Tuple = xs.length match case 0 => EmptyTuple case 1 => Tuple1(xs(0)) case 2 => Tuple2(xs(0), xs(1)) @@ -51,7 +47,6 @@ object Tuples { case 21 => Tuple21(xs(0), xs(1), xs(2), xs(3), xs(4), xs(5), xs(6), xs(7), xs(8), xs(9), xs(10), xs(11), xs(12), xs(13), xs(14), xs(15), xs(16), xs(17), xs(18), xs(19), xs(20)) case 22 => Tuple22(xs(0), xs(1), xs(2), xs(3), xs(4), xs(5), xs(6), xs(7), xs(8), xs(9), xs(10), xs(11), xs(12), xs(13), xs(14), xs(15), xs(16), xs(17), xs(18), xs(19), xs(20), xs(21)) case _ => TupleXXL.fromIArray(xs.clone().asInstanceOf[IArray[Object]]).asInstanceOf[Tuple] - } def fromIArray(xs: IArray[Object]): Tuple = if (xs.length <= 22) fromArray(xs.asInstanceOf[Array[Object]]) @@ -60,115 +55,93 @@ object Tuples { def fromProduct(xs: Product): Tuple = (xs.productArity match { case 0 => EmptyTuple case 1 => - xs match { + xs match case xs: Tuple1[_] => xs case xs => Tuple1(xs.productElement(0)) - } case 2 => - xs match { + xs match case xs: Tuple2[_, _] => xs case xs => Tuple2(xs.productElement(0), xs.productElement(1)) - } case 3 => - xs match { + xs match case xs: Tuple3[_, _, _] => xs case xs => Tuple3(xs.productElement(0), xs.productElement(1), xs.productElement(2)) - } case 4 => - xs match { + xs match case xs: Tuple4[_, _, _, _] => xs case xs => Tuple4(xs.productElement(0), xs.productElement(1), xs.productElement(2), xs.productElement(3)) - } case 5 => - xs match { + xs match case xs: Tuple5[_, _, _, _, _] => xs case xs => Tuple5(xs.productElement(0), xs.productElement(1), xs.productElement(2), xs.productElement(3), 
xs.productElement(4)) - } case 6 => - xs match { + xs match case xs: Tuple6[_, _, _, _, _, _] => xs case xs => Tuple6(xs.productElement(0), xs.productElement(1), xs.productElement(2), xs.productElement(3), xs.productElement(4), xs.productElement(5)) - } case 7 => - xs match { + xs match case xs: Tuple7[_, _, _, _, _, _, _] => xs case xs => Tuple7(xs.productElement(0), xs.productElement(1), xs.productElement(2), xs.productElement(3), xs.productElement(4), xs.productElement(5), xs.productElement(6)) - } case 8 => - xs match { + xs match case xs: Tuple8[_, _, _, _, _, _, _, _] => xs case xs => Tuple8(xs.productElement(0), xs.productElement(1), xs.productElement(2), xs.productElement(3), xs.productElement(4), xs.productElement(5), xs.productElement(6), xs.productElement(7)) - } case 9 => - xs match { + xs match case xs: Tuple9[_, _, _, _, _, _, _, _, _] => xs case xs => Tuple9(xs.productElement(0), xs.productElement(1), xs.productElement(2), xs.productElement(3), xs.productElement(4), xs.productElement(5), xs.productElement(6), xs.productElement(7), xs.productElement(8)) - } case 10 => - xs match { + xs match case xs: Tuple10[_, _, _, _, _, _, _, _, _, _] => xs case xs => Tuple10(xs.productElement(0), xs.productElement(1), xs.productElement(2), xs.productElement(3), xs.productElement(4), xs.productElement(5), xs.productElement(6), xs.productElement(7), xs.productElement(8), xs.productElement(9)) - } case 11 => - xs match { + xs match case xs: Tuple11[_, _, _, _, _, _, _, _, _, _, _] => xs case xs => Tuple11(xs.productElement(0), xs.productElement(1), xs.productElement(2), xs.productElement(3), xs.productElement(4), xs.productElement(5), xs.productElement(6), xs.productElement(7), xs.productElement(8), xs.productElement(9), xs.productElement(10)) - } case 12 => - xs match { + xs match case xs: Tuple12[_, _, _, _, _, _, _, _, _, _, _, _] => xs case xs => Tuple12(xs.productElement(0), xs.productElement(1), xs.productElement(2), xs.productElement(3), xs.productElement(4), 
xs.productElement(5), xs.productElement(6), xs.productElement(7), xs.productElement(8), xs.productElement(9), xs.productElement(10), xs.productElement(11)) - } case 13 => - xs match { + xs match case xs: Tuple13[_, _, _, _, _, _, _, _, _, _, _, _, _] => xs case xs => Tuple13(xs.productElement(0), xs.productElement(1), xs.productElement(2), xs.productElement(3), xs.productElement(4), xs.productElement(5), xs.productElement(6), xs.productElement(7), xs.productElement(8), xs.productElement(9), xs.productElement(10), xs.productElement(11), xs.productElement(12)) - } case 14 => - xs match { + xs match case xs: Tuple14[_, _, _, _, _, _, _, _, _, _, _, _, _, _] => xs case xs => Tuple14(xs.productElement(0), xs.productElement(1), xs.productElement(2), xs.productElement(3), xs.productElement(4), xs.productElement(5), xs.productElement(6), xs.productElement(7), xs.productElement(8), xs.productElement(9), xs.productElement(10), xs.productElement(11), xs.productElement(12), xs.productElement(13)) - } case 15 => - xs match { + xs match case xs: Tuple15[_, _, _, _, _, _, _, _, _, _, _, _, _, _, _] => xs case xs => Tuple15(xs.productElement(0), xs.productElement(1), xs.productElement(2), xs.productElement(3), xs.productElement(4), xs.productElement(5), xs.productElement(6), xs.productElement(7), xs.productElement(8), xs.productElement(9), xs.productElement(10), xs.productElement(11), xs.productElement(12), xs.productElement(13), xs.productElement(14)) - } case 16 => - xs match { + xs match case xs: Tuple16[_, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _] => xs case xs => Tuple16(xs.productElement(0), xs.productElement(1), xs.productElement(2), xs.productElement(3), xs.productElement(4), xs.productElement(5), xs.productElement(6), xs.productElement(7), xs.productElement(8), xs.productElement(9), xs.productElement(10), xs.productElement(11), xs.productElement(12), xs.productElement(13), xs.productElement(14), xs.productElement(15)) - } case 17 => - xs match { + xs match case xs: 
Tuple17[_, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _] => xs case xs => Tuple17(xs.productElement(0), xs.productElement(1), xs.productElement(2), xs.productElement(3), xs.productElement(4), xs.productElement(5), xs.productElement(6), xs.productElement(7), xs.productElement(8), xs.productElement(9), xs.productElement(10), xs.productElement(11), xs.productElement(12), xs.productElement(13), xs.productElement(14), xs.productElement(15), xs.productElement(16)) - } case 18 => - xs match { + xs match case xs: Tuple18[_, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _] => xs case xs => Tuple18(xs.productElement(0), xs.productElement(1), xs.productElement(2), xs.productElement(3), xs.productElement(4), xs.productElement(5), xs.productElement(6), xs.productElement(7), xs.productElement(8), xs.productElement(9), xs.productElement(10), xs.productElement(11), xs.productElement(12), xs.productElement(13), xs.productElement(14), xs.productElement(15), xs.productElement(16), xs.productElement(17)) - } case 19 => - xs match { + xs match case xs: Tuple19[_, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _] => xs case xs => Tuple19(xs.productElement(0), xs.productElement(1), xs.productElement(2), xs.productElement(3), xs.productElement(4), xs.productElement(5), xs.productElement(6), xs.productElement(7), xs.productElement(8), xs.productElement(9), xs.productElement(10), xs.productElement(11), xs.productElement(12), xs.productElement(13), xs.productElement(14), xs.productElement(15), xs.productElement(16), xs.productElement(17), xs.productElement(18)) - } case 20 => - xs match { + xs match case xs: Tuple20[_, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _] => xs case xs => Tuple20(xs.productElement(0), xs.productElement(1), xs.productElement(2), xs.productElement(3), xs.productElement(4), xs.productElement(5), xs.productElement(6), xs.productElement(7), xs.productElement(8), xs.productElement(9), xs.productElement(10), xs.productElement(11), xs.productElement(12), 
xs.productElement(13), xs.productElement(14), xs.productElement(15), xs.productElement(16), xs.productElement(17), xs.productElement(18), xs.productElement(19)) - } case 21 => - xs match { + xs match case xs: Tuple21[_, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _] => xs case xs => Tuple21(xs.productElement(0), xs.productElement(1), xs.productElement(2), xs.productElement(3), xs.productElement(4), xs.productElement(5), xs.productElement(6), xs.productElement(7), xs.productElement(8), xs.productElement(9), xs.productElement(10), xs.productElement(11), xs.productElement(12), xs.productElement(13), xs.productElement(14), xs.productElement(15), xs.productElement(16), xs.productElement(17), xs.productElement(18), xs.productElement(19), xs.productElement(20)) - } case 22 => - xs match { + xs match case xs: Tuple22[_, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _] => xs case xs => Tuple22(xs.productElement(0), xs.productElement(1), xs.productElement(2), xs.productElement(3), xs.productElement(4), xs.productElement(5), xs.productElement(6), xs.productElement(7), xs.productElement(8), xs.productElement(9), xs.productElement(10), xs.productElement(11), xs.productElement(12), xs.productElement(13), xs.productElement(14), xs.productElement(15), xs.productElement(16), xs.productElement(17), xs.productElement(18), xs.productElement(19), xs.productElement(20), xs.productElement(21)) - } case _ => (xs match { case xs: TupleXXL => xs @@ -177,8 +150,8 @@ object Tuples { }) // Cons for Tuple1 to Tuple22 - private def specialCaseCons(x: Any, self: Tuple): Tuple = { - (self: Any) match { + private def specialCaseCons(x: Any, self: Tuple): Tuple = + (self: Any) match case EmptyTuple => Tuple1(x) case self: Tuple1[_] => @@ -235,23 +208,19 @@ object Tuples { self._21.asInstanceOf[Object], self._22.asInstanceOf[Object], ) TupleXXL.fromIArray(arr.asInstanceOf[IArray[Object]]).asInstanceOf[Tuple] - } - } // Cons for TupleXXL - private def xxlCons(x: Any, xxl: 
TupleXXL): TupleXXL = { + private def xxlCons(x: Any, xxl: TupleXXL): TupleXXL = val arr = new Array[Object](xxl.productArity + 1) arr(0) = x.asInstanceOf[Object] System.arraycopy(xxl.elems, 0, arr, 1, xxl.productArity) TupleXXL.fromIArray(arr.asInstanceOf[IArray[Object]]) - } - def cons(x: Any, self: Tuple): Tuple = (self: Any) match { + def cons(x: Any, self: Tuple): Tuple = (self: Any) match case xxl: TupleXXL => xxlCons(x, xxl).asInstanceOf[Tuple] case _ => specialCaseCons(x, self) - } - def concat[This <: Tuple, That <: Tuple](self: This, that: That): Tuple = { + def concat[This <: Tuple, That <: Tuple](self: This, that: That): Tuple = val selfSize: Int = self.size // If one of the tuples is empty, we can leave early if selfSize == 0 then @@ -264,28 +233,25 @@ object Tuples { val arr = new Array[Object](selfSize + thatSize) // Copies the tuple to an array, at the given offset - inline def copyToArray[T <: Tuple](tuple: T, size: Int, array: Array[Object], offset: Int): Unit = (tuple: Any) match { + inline def copyToArray[T <: Tuple](tuple: T, size: Int, array: Array[Object], offset: Int): Unit = (tuple: Any) match case xxl: TupleXXL => System.arraycopy(xxl.elems, 0, array, offset, size) case _ => tuple.productIterator.asInstanceOf[Iterator[Object]] .copyToArray(array, offset, size) - } // In the general case, we copy the two tuples to an array, and convert it back to a tuple copyToArray(self, selfSize, arr, 0) copyToArray(that, thatSize, arr, selfSize) fromIArray(arr.asInstanceOf[IArray[Object]]) - } - def size(self: Tuple): Int = (self: Any) match { + def size(self: Tuple): Int = (self: Any) match case EmptyTuple => 0 case self: Product => self.productArity - } // Tail for Tuple1 to Tuple22 - private def specialCaseTail(self: Tuple): Tuple = { - (self: Any) match { + private def specialCaseTail(self: Tuple): Tuple = + (self: Any) match case self: Tuple1[_] => EmptyTuple case self: Tuple2[_, _] => @@ -330,12 +296,10 @@ object Tuples { Tuple20(self._2, self._3, 
self._4, self._5, self._6, self._7, self._8, self._9, self._10, self._11, self._12, self._13, self._14, self._15, self._16, self._17, self._18, self._19, self._20, self._21) case self: Tuple22[_, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _] => Tuple21(self._2, self._3, self._4, self._5, self._6, self._7, self._8, self._9, self._10, self._11, self._12, self._13, self._14, self._15, self._16, self._17, self._18, self._19, self._20, self._21, self._22) - } - } // Tail for TupleXXL - private def xxlTail(xxl: TupleXXL): Tuple = { - if (xxl.productArity == 23) { + private def xxlTail(xxl: TupleXXL): Tuple = + if (xxl.productArity == 23) val elems = xxl.elems Tuple22( elems(1), elems(2), elems(3), elems(4), elems(5), elems(6), elems(7), @@ -343,29 +307,25 @@ object Tuples { elems(15), elems(16), elems(17), elems(18), elems(19), elems(20), elems(21), elems(22) ) - } else { + else val arr = new Array[Object](xxl.elems.length - 1) System.arraycopy(xxl.elems, 1, arr, 0, xxl.elems.length - 1) TupleXXL.fromIArray(arr.asInstanceOf[IArray[Object]]).asInstanceOf[Tuple] - } - } - def tail(self: NonEmptyTuple): Tuple = (self: Any) match { + def tail(self: NonEmptyTuple): Tuple = (self: Any) match case xxl: TupleXXL => xxlTail(xxl) case _ => specialCaseTail(self) - } // Append for TupleXXL - private def xxlAppend(x: Any, xxl: TupleXXL): TupleXXL = { + private def xxlAppend(x: Any, xxl: TupleXXL): TupleXXL = val arr = new Array[Object](xxl.productArity + 1) arr(xxl.productArity) = x.asInstanceOf[Object] System.arraycopy(xxl.elems, 0, arr, 0, xxl.productArity) TupleXXL.fromIArray(arr.asInstanceOf[IArray[Object]]) - } // Append for Tuple1 to Tuple22 - private def specialCaseAppend(x: Any, self: Tuple): Tuple = { - (self: Any) match { + private def specialCaseAppend(x: Any, self: Tuple): Tuple = + (self: Any) match case EmptyTuple => Tuple1(x) case self: Tuple1[_] => @@ -422,17 +382,14 @@ object Tuples { self._21.asInstanceOf[Object], self._22.asInstanceOf[Object], 
x.asInstanceOf[Object] ) TupleXXL.fromIArray(arr.asInstanceOf[IArray[Object]]).asInstanceOf[Tuple] - } - } - def append(x: Any, self: Tuple): Tuple = (self: Any) match { + def append(x: Any, self: Tuple): Tuple = (self: Any) match case xxl: TupleXXL => xxlAppend(x, xxl).asInstanceOf[Tuple] case _ => specialCaseAppend(x, self) - } // Init for TupleXXL - private def xxlInit(xxl: TupleXXL): Tuple = { - if (xxl.productArity == 23) { + private def xxlInit(xxl: TupleXXL): Tuple = + if (xxl.productArity == 23) val elems = xxl.elems Tuple22( elems(0), elems(1), elems(2), elems(3), elems(4), elems(5), @@ -440,16 +397,14 @@ object Tuples { elems(12), elems(13), elems(14), elems(15), elems(16), elems(17), elems(18), elems(19), elems(20), elems(21) ) - } else { + else val arr = new Array[Object](xxl.elems.length - 1) System.arraycopy(xxl.elems, 0, arr, 0, xxl.elems.length - 1) TupleXXL.fromIArray(arr.asInstanceOf[IArray[Object]]).asInstanceOf[Tuple] - } - } // Init for Tuple1 to Tuple22 - private def specialCaseInit(self: Tuple): Tuple = { - (self: Any) match { + private def specialCaseInit(self: Tuple): Tuple = + (self: Any) match case _: Tuple1[_] => EmptyTuple case self: Tuple2[_, _] => @@ -494,33 +449,27 @@ object Tuples { Tuple20(self._1, self._2, self._3, self._4, self._5, self._6, self._7, self._8, self._9, self._10, self._11, self._12, self._13, self._14, self._15, self._16, self._17, self._18, self._19, self._20) case self: Tuple22[_, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _] => Tuple21(self._1, self._2, self._3, self._4, self._5, self._6, self._7, self._8, self._9, self._10, self._11, self._12, self._13, self._14, self._15, self._16, self._17, self._18, self._19, self._20, self._21) - } - } - def init(self: NonEmptyTuple): Tuple = (self: Any) match { + def init(self: NonEmptyTuple): Tuple = (self: Any) match case xxl: TupleXXL => xxlInit(xxl) case _ => specialCaseInit(self) - } - def last(self: NonEmptyTuple): Any = (self: Any) match { + def 
last(self: NonEmptyTuple): Any = (self: Any) match case self: Product => self.productElement(self.productArity - 1) - } def apply(self: NonEmptyTuple, n: Int): Any = self.productElement(n) // Benchmarks showed that this is faster than doing (it1 zip it2).copyToArray(...) - private def zipIterators(it1: Iterator[Any], it2: Iterator[Any], size: Int): IArray[Object] = { + private def zipIterators(it1: Iterator[Any], it2: Iterator[Any], size: Int): IArray[Object] = val arr = new Array[Object](size) var i = 0 - while (i < size) { + while (i < size) arr(i) = (it1.next(), it2.next()) i += 1 - } arr.asInstanceOf[IArray[Object]] - } - def zip(t1: Tuple, t2: Tuple): Tuple = { + def zip(t1: Tuple, t2: Tuple): Tuple = val t1Size: Int = t1.size val t2Size: Int = t2.size val size = Math.min(t1Size, t2Size) @@ -532,21 +481,19 @@ object Tuples { size ) ) - } - def map[F[_]](self: Tuple, f: [t] => t => F[t]): Tuple = self match { + def map[F[_]](self: Tuple, f: [t] => t => F[t]): Tuple = self match case EmptyTuple => self case _ => fromIArray(self.productIterator.map(f(_).asInstanceOf[Object]).toArray.asInstanceOf[IArray[Object]]) // TODO use toIArray - } - def take(self: Tuple, n: Int): Tuple = { + def take(self: Tuple, n: Int): Tuple = if (n < 0) throw new IndexOutOfBoundsException(n.toString) val selfSize: Int = self.size val actualN = Math.min(n, selfSize) if (actualN == 0) EmptyTuple - else { - val arr = (self: Any) match { + else + val arr = (self: Any) match case xxl: TupleXXL => xxl.elems.asInstanceOf[Array[Object]].take(actualN) case _ => @@ -554,21 +501,18 @@ object Tuples { self.productIterator.asInstanceOf[Iterator[Object]] .copyToArray(arr, 0, actualN) arr - } fromIArray(arr.asInstanceOf[IArray[Object]]) - } - } - def drop(self: Tuple, n: Int): Tuple = { + def drop(self: Tuple, n: Int): Tuple = if (n < 0) throw new IndexOutOfBoundsException(n.toString) val size = self.size val actualN = Math.min(n, size) val rem = size - actualN if (rem == 0) EmptyTuple - else { - val 
arr = (self: Any) match { + else + val arr = (self: Any) match case xxl: TupleXXL => xxl.elems.asInstanceOf[Array[Object]].drop(actualN) case _ => @@ -576,17 +520,14 @@ object Tuples { self.productIterator.asInstanceOf[Iterator[Object]] .drop(actualN).copyToArray(arr, 0, rem) arr - } fromIArray(arr.asInstanceOf[IArray[Object]]) - } - } - def splitAt(self: Tuple, n: Int): (Tuple, Tuple) = { + def splitAt(self: Tuple, n: Int): (Tuple, Tuple) = if (n < 0) throw new IndexOutOfBoundsException(n.toString) val size = self.size val actualN = Math.min(n, size) - val (arr1, arr2) = (self: Any) match { + val (arr1, arr2) = (self: Any) match case EmptyTuple => (Array.empty[Object], Array.empty[Object]) case xxl: TupleXXL => xxl.elems.asInstanceOf[Array[Object]].splitAt(actualN) @@ -597,13 +538,11 @@ object Tuples { it.copyToArray(arr1, 0, actualN) it.copyToArray(arr2, 0, size - actualN) (arr1, arr2) - } ( fromIArray(arr1.asInstanceOf[IArray[Object]]), fromIArray(arr2.asInstanceOf[IArray[Object]]) ) - } def consIterator(head: Any, tail: Tuple): Iterator[Any] = Iterator.single(head) ++ tail.productIterator @@ -674,4 +613,3 @@ object Tuples { case _ => false -} diff --git a/library/src/scala/runtime/TypeBox.scala b/library/src/scala/runtime/TypeBox.scala index ec2e42eeafc7..537dad7ddef5 100644 --- a/library/src/scala/runtime/TypeBox.scala +++ b/library/src/scala/runtime/TypeBox.scala @@ -6,6 +6,5 @@ package scala.runtime * instantiatable type variable. To be able to instantiate `X`, we cast the tree to type * `X[$n.CAP]` where `$n` is a fresh skolem type with underlying type `TypeBox[L, U]`. 
*/ -final abstract class TypeBox[-L <: U, +U] { +final abstract class TypeBox[-L <: U, +U]: type CAP >: L <: U -} diff --git a/library/src/scala/runtime/coverage/Invoker.scala b/library/src/scala/runtime/coverage/Invoker.scala index c35c6c2ec7df..c8063bd6d23a 100644 --- a/library/src/scala/runtime/coverage/Invoker.scala +++ b/library/src/scala/runtime/coverage/Invoker.scala @@ -8,7 +8,7 @@ import java.io.{File, FileWriter} import java.nio.file.Files @sharable // avoids false positive by -Ycheck-reentrant -object Invoker { +object Invoker: private val runtimeUUID = java.util.UUID.randomUUID() private val MeasurementsPrefix = "scoverage.measurements." @@ -32,9 +32,8 @@ object Invoker { def invoked(id: Int, dataDir: String): Unit = val set = dataDirToSet.getOrElseUpdate(dataDir, BitSet.empty) if !set.contains(id) then - val added = set.synchronized { + val added = set.synchronized: set.add(id) - } if added then var writers = threadFiles.get() if writers == null then @@ -53,4 +52,3 @@ object Invoker { dataDir, MeasurementsPrefix + runtimeUUID + "." + Thread.currentThread.nn.getId ) -} diff --git a/library/src/scala/runtime/stdLibPatches/Predef.scala b/library/src/scala/runtime/stdLibPatches/Predef.scala index 09feaf11c31d..b6f47e22275e 100644 --- a/library/src/scala/runtime/stdLibPatches/Predef.scala +++ b/library/src/scala/runtime/stdLibPatches/Predef.scala @@ -22,9 +22,8 @@ object Predef: * }}} * @group utilities */ - inline def valueOf[T]: T = summonFrom { + inline def valueOf[T]: T = summonFrom: case ev: ValueOf[T] => ev.value - } /** Summon a given value of type `T`. Usually, the argument is not passed explicitly. 
* diff --git a/library/src/scala/util/CommandLineParser.scala b/library/src/scala/util/CommandLineParser.scala index fd239ef231c5..a9fcc584ecd8 100644 --- a/library/src/scala/util/CommandLineParser.scala +++ b/library/src/scala/util/CommandLineParser.scala @@ -1,7 +1,7 @@ package scala.util /** A utility object to support command line parsing for @main methods */ -object CommandLineParser { +object CommandLineParser: /** An exception raised for an illegal command line * @param idx The index of the argument that's faulty (starting from 0) @@ -12,12 +12,10 @@ object CommandLineParser { /** Parse command line argument `s`, which has index `n`, as a value of type `T` * @throws ParseError if argument cannot be converted to type `T`. */ - def parseString[T](str: String, n: Int)(using fs: FromString[T]): T = { + def parseString[T](str: String, n: Int)(using fs: FromString[T]): T = try fs.fromString(str) - catch { + catch case ex: IllegalArgumentException => throw ParseError(n, ex.toString) - } - } /** Parse `n`'th argument in `args` (counting from 0) as a value of type `T` * @throws ParseError if argument does not exist or cannot be converted to type `T`. 
@@ -34,26 +32,23 @@ object CommandLineParser { else Nil /** Print error message explaining given ParserError */ - def showError(err: ParseError): Unit = { + def showError(err: ParseError): Unit = val where = if err.idx == 0 then "" else if err.idx == 1 then " after first argument" else s" after ${err.idx} arguments" println(s"Illegal command line$where: ${err.msg}") - } - trait FromString[T] { + trait FromString[T]: /** Can throw java.lang.IllegalArgumentException */ def fromString(s: String): T def fromStringOption(s: String): Option[T] = try Some(fromString(s)) - catch { + catch case ex: IllegalArgumentException => None - } - } - object FromString { + object FromString: given FromString[String] with def fromString(s: String) = s @@ -77,5 +72,3 @@ object CommandLineParser { given FromString[Double] with def fromString(s: String) = s.toDouble - } -} diff --git a/library/src/scala/util/FromDigits.scala b/library/src/scala/util/FromDigits.scala index 1577f4103e03..c91a53ccb275 100644 --- a/library/src/scala/util/FromDigits.scala +++ b/library/src/scala/util/FromDigits.scala @@ -6,7 +6,7 @@ import annotation.internal.sharable /** A type class for types that admit numeric literals. */ -trait FromDigits[T] { +trait FromDigits[T]: /** Convert `digits` string to value of type `T` * `digits` can contain @@ -19,21 +19,19 @@ trait FromDigits[T] { * floating point literal that produces a zero value) */ def fromDigits(digits: String): T -} -object FromDigits { +object FromDigits: /** A subclass of `FromDigits` that also allows to convert whole number literals * with a radix other than 10 */ - trait WithRadix[T] extends FromDigits[T] { + trait WithRadix[T] extends FromDigits[T]: def fromDigits(digits: String): T = fromDigits(digits, 10) /** Convert digits string with given radix to number of type `T`. * E.g. if radix is 16, digits `a..f` and `A..F` are also allowed. 
*/ def fromDigits(digits: String, radix: Int): T - } /** A subclass of `FromDigits` that also allows to convert number * literals containing a decimal point ".". @@ -68,18 +66,17 @@ object FromDigits { * since these do not handle unsigned hex numbers greater than the maximal value * correctly. */ - private def integerFromDigits(digits: String, radix: Int, limit: Long): Long = { + private def integerFromDigits(digits: String, radix: Int, limit: Long): Long = var value: Long = 0 val divider = if (radix == 10) 1 else 2 var i = 0 var negated = false val len = digits.length - if (0 < len && (digits(0) == '-' || digits(0) == '+')) { + if (0 < len && (digits(0) == '-' || digits(0) == '+')) negated = digits(0) == '-' i += 1 - } if (i == len) throw MalformedNumber() - while (i < len) { + while (i < len) val ch = digits(i) val d = if (ch <= '9') ch - '0' @@ -93,9 +90,7 @@ object FromDigits { !(negated && limit == value * radix - 1 + d)) throw NumberTooLarge() value = value * radix + d i += 1 - } if (negated) -value else value - } /** Convert digit string to Int number * @param digits The string to convert @@ -128,16 +123,14 @@ object FromDigits { * string contains non-zero digits before the exponent. * @throws MalformedNumber if digits is not a legal digit string for floating point numbers. */ - def floatFromDigits(digits: String): Float = { + def floatFromDigits(digits: String): Float = val x: Float = try java.lang.Float.parseFloat(digits) - catch { + catch case ex: NumberFormatException => throw MalformedNumber() - } if (x.isInfinite) throw NumberTooLarge() if (x == 0.0f && !zeroFloat.pattern.matcher(digits).nn.matches) throw NumberTooSmall() x - } /** Convert digit string to Double number * @param digits The string to convert @@ -146,22 +139,17 @@ object FromDigits { * string contains non-zero digits before the exponent. * @throws MalformedNumber if digits is not a legal digit string for floating point numbers.. 
*/ - def doubleFromDigits(digits: String): Double = { + def doubleFromDigits(digits: String): Double = val x: Double = try java.lang.Double.parseDouble(digits) - catch { + catch case ex: NumberFormatException => throw MalformedNumber() - } if (x.isInfinite) throw NumberTooLarge() if (x == 0.0d && !zeroFloat.pattern.matcher(digits).nn.matches) throw NumberTooSmall() x - } - given BigIntFromDigits: WithRadix[BigInt] with { + given BigIntFromDigits: WithRadix[BigInt] with def fromDigits(digits: String, radix: Int): BigInt = BigInt(digits, radix) - } - given BigDecimalFromDigits: Floating[BigDecimal] with { + given BigDecimalFromDigits: Floating[BigDecimal] with def fromDigits(digits: String): BigDecimal = BigDecimal(digits) - } -} diff --git a/library/src/scala/util/NotGiven.scala b/library/src/scala/util/NotGiven.scala index 973e709042cb..59b71033484c 100644 --- a/library/src/scala/util/NotGiven.scala +++ b/library/src/scala/util/NotGiven.scala @@ -24,12 +24,11 @@ package scala.util */ final class NotGiven[+T] private () -trait LowPriorityNotGiven { +trait LowPriorityNotGiven: /** A fallback method used to emulate negation in Scala 2 */ given default[T]: NotGiven[T] = NotGiven.value -} -object NotGiven extends LowPriorityNotGiven { +object NotGiven extends LowPriorityNotGiven: private val cachedValue = new NotGiven[Nothing]() @@ -44,4 +43,3 @@ object NotGiven extends LowPriorityNotGiven { /** One of two ambiguous methods used to emulate negation in Scala 2 */ given amb2[T](using ev: T): NotGiven[T] = ??? -} diff --git a/library/src/scala/util/control/NonLocalReturns.scala b/library/src/scala/util/control/NonLocalReturns.scala index ad4dc05f36ac..cd4604354035 100644 --- a/library/src/scala/util/control/NonLocalReturns.scala +++ b/library/src/scala/util/control/NonLocalReturns.scala @@ -18,16 +18,14 @@ package scala.util.control * rewritten by the compiler to jumps. 
*/ @deprecated("Use scala.util.boundary instead", "3.3") -object NonLocalReturns { +object NonLocalReturns: @deprecated("Use scala.util.boundary.Break instead", "3.3") - class ReturnThrowable[T] extends ControlThrowable { + class ReturnThrowable[T] extends ControlThrowable: private var myResult: T = _ - def throwReturn(result: T): Nothing = { + def throwReturn(result: T): Nothing = myResult = result throw this - } def result: T = myResult - } /** Performs a nonlocal return by throwing an exception. */ @deprecated("Use scala.util.boundary.break instead", "3.3") @@ -36,12 +34,9 @@ object NonLocalReturns { /** Enable nonlocal returns in `op`. */ @deprecated("Use scala.util.boundary instead", "3.3") - def returning[T](op: ReturnThrowable[T] ?=> T): T = { + def returning[T](op: ReturnThrowable[T] ?=> T): T = val returner = new ReturnThrowable[T] try op(using returner) - catch { + catch case ex: ReturnThrowable[T] => if (ex.eq(returner)) ex.result else throw ex - } - } -} From e674eaad1c83c8cd87c325d0e522815e6a8b24f7 Mon Sep 17 00:00:00 2001 From: Adrien Piquerez Date: Tue, 30 May 2023 13:26:53 +0200 Subject: [PATCH 5/5] Rewrite scala3-compiler to indent --- .../src/dotty/tools/MainGenericCompiler.scala | 6 +- .../src/dotty/tools/MainGenericRunner.scala | 12 +- .../tools/backend/ScalaPrimitivesOps.scala | 30 +- .../tools/backend/WorklistAlgorithm.scala | 12 +- .../dotty/tools/backend/jvm/AsmUtils.scala | 15 +- .../tools/backend/jvm/BCodeAsmCommon.scala | 55 +- .../tools/backend/jvm/BCodeBodyBuilder.scala | 976 +++++------- .../tools/backend/jvm/BCodeHelpers.scala | 238 +-- .../tools/backend/jvm/BCodeIdiomatic.scala | 268 ++-- .../tools/backend/jvm/BCodeSkelBuilder.scala | 312 ++-- .../tools/backend/jvm/BCodeSyncAndTry.scala | 247 ++- .../src/dotty/tools/backend/jvm/BTypes.scala | 135 +- .../tools/backend/jvm/BTypesFromSymbols.scala | 75 +- .../tools/backend/jvm/BackendUtils.scala | 48 +- .../tools/backend/jvm/ClassfileWriter.scala | 52 +- 
.../src/dotty/tools/backend/jvm/CodeGen.scala | 42 +- .../tools/backend/jvm/CollectSuperCalls.scala | 15 +- .../dotty/tools/backend/jvm/CoreBTypes.scala | 69 +- .../backend/jvm/DottyBackendInterface.scala | 64 +- .../dotty/tools/backend/jvm/GenBCode.scala | 30 +- .../dotty/tools/backend/jvm/GenBCodeOps.scala | 3 +- .../backend/jvm/GenericSignatureVisitor.scala | 129 +- .../tools/backend/jvm/PostProcessor.scala | 33 +- .../jvm/PostProcessorFrontendAccess.scala | 21 +- .../dotty/tools/backend/jvm/Primitives.scala | 54 +- .../tools/backend/jvm/scalaPrimitives.scala | 38 +- .../dotty/tools/backend/sjs/GenSJSIR.scala | 3 +- .../dotty/tools/backend/sjs/JSCodeGen.scala | 1404 ++++++----------- .../tools/backend/sjs/JSDefinitions.scala | 18 +- .../dotty/tools/backend/sjs/JSEncoding.scala | 107 +- .../tools/backend/sjs/JSExportsGen.scala | 344 ++-- .../dotty/tools/backend/sjs/JSPositions.scala | 40 +- .../tools/backend/sjs/JSPrimitives.scala | 20 +- .../dotty/tools/backend/sjs/ScopedVar.scala | 21 +- compiler/src/dotty/tools/dotc/Bench.scala | 3 +- .../dotty/tools/dotc/CompilationUnit.scala | 30 +- compiler/src/dotty/tools/dotc/Compiler.scala | 12 +- compiler/src/dotty/tools/dotc/Driver.scala | 21 +- compiler/src/dotty/tools/dotc/Resident.scala | 18 +- compiler/src/dotty/tools/dotc/Run.scala | 65 +- .../src/dotty/tools/dotc/ast/Desugar.scala | 386 ++--- .../dotty/tools/dotc/ast/DesugarEnums.scala | 57 +- .../dotty/tools/dotc/ast/MainProxies.scala | 72 +- .../dotty/tools/dotc/ast/NavigateAST.scala | 39 +- .../src/dotty/tools/dotc/ast/Positioned.scala | 54 +- .../src/dotty/tools/dotc/ast/TreeInfo.scala | 276 ++-- .../tools/dotc/ast/TreeMapWithImplicits.scala | 27 +- .../dotty/tools/dotc/ast/TreeTypeMap.scala | 33 +- compiler/src/dotty/tools/dotc/ast/Trees.scala | 491 ++---- compiler/src/dotty/tools/dotc/ast/tpd.scala | 309 ++-- compiler/src/dotty/tools/dotc/ast/untpd.scala | 174 +- .../tools/dotc/cc/CaptureAnnotation.scala | 6 +- .../src/dotty/tools/dotc/cc/CaptureSet.scala | 
32 +- .../dotty/tools/dotc/cc/CheckCaptures.scala | 37 +- compiler/src/dotty/tools/dotc/cc/Setup.scala | 2 +- .../src/dotty/tools/dotc/cc/Synthetics.scala | 3 +- .../dotc/classpath/AggregateClassPath.scala | 61 +- .../tools/dotc/classpath/ClassPath.scala | 39 +- .../dotc/classpath/ClassPathFactory.scala | 12 +- .../dotc/classpath/DirectoryClassPath.scala | 109 +- .../tools/dotc/classpath/FileUtils.scala | 12 +- .../dotc/classpath/PackageNameUtils.scala | 9 +- .../classpath/VirtualDirectoryClassPath.scala | 15 +- .../ZipAndJarFileLookupFactory.scala | 57 +- .../dotc/classpath/ZipArchiveFileLookup.scala | 12 +- .../dotty/tools/dotc/config/CliCommand.scala | 6 +- .../src/dotty/tools/dotc/config/Config.scala | 3 +- .../tools/dotc/config/JavaPlatform.scala | 12 +- .../dotty/tools/dotc/config/OutputDirs.scala | 30 +- .../tools/dotc/config/PathResolver.scala | 48 +- .../dotty/tools/dotc/config/Platform.scala | 3 +- .../dotty/tools/dotc/config/Printers.scala | 9 +- .../dotty/tools/dotc/config/Properties.scala | 12 +- .../dotty/tools/dotc/config/SJSPlatform.scala | 6 +- .../tools/dotc/config/ScalaSettings.scala | 6 +- .../tools/dotc/config/ScalaVersion.scala | 69 +- .../dotty/tools/dotc/config/Settings.scala | 31 +- .../tools/dotc/config/WrappedProperties.scala | 15 +- .../dotty/tools/dotc/core/Annotations.scala | 73 +- .../tools/dotc/core/CheckRealizable.scala | 51 +- .../src/dotty/tools/dotc/core/Comments.scala | 114 +- .../src/dotty/tools/dotc/core/Constants.scala | 54 +- .../dotty/tools/dotc/core/Constraint.scala | 3 +- .../tools/dotc/core/ConstraintHandling.scala | 59 +- .../tools/dotc/core/ConstraintRunInfo.scala | 3 +- .../dotty/tools/dotc/core/ContextOps.scala | 32 +- .../src/dotty/tools/dotc/core/Contexts.scala | 69 +- .../dotty/tools/dotc/core/Decorators.scala | 53 +- .../dotty/tools/dotc/core/Definitions.scala | 156 +- .../tools/dotc/core/DenotTransformers.scala | 27 +- .../dotty/tools/dotc/core/Denotations.scala | 189 +-- 
.../src/dotty/tools/dotc/core/Flags.scala | 39 +- .../tools/dotc/core/GadtConstraint.scala | 30 +- .../src/dotty/tools/dotc/core/Hashable.scala | 24 +- .../tools/dotc/core/JavaNullInterop.scala | 12 +- .../tools/dotc/core/MacroClassLoader.scala | 6 +- .../tools/dotc/core/MatchTypeTrace.scala | 3 +- compiler/src/dotty/tools/dotc/core/Mode.scala | 9 +- .../src/dotty/tools/dotc/core/NameKinds.scala | 123 +- .../src/dotty/tools/dotc/core/NameOps.scala | 81 +- .../src/dotty/tools/dotc/core/NameTags.scala | 6 +- .../src/dotty/tools/dotc/core/NamerOps.scala | 4 +- .../src/dotty/tools/dotc/core/Names.scala | 132 +- .../tools/dotc/core/NullOpsDecorator.scala | 12 +- .../tools/dotc/core/OrderingConstraint.scala | 123 +- .../src/dotty/tools/dotc/core/ParamInfo.scala | 6 +- .../dotc/core/PatternTypeConstrainer.scala | 87 +- .../src/dotty/tools/dotc/core/Periods.scala | 12 +- .../src/dotty/tools/dotc/core/Phases.scala | 66 +- .../src/dotty/tools/dotc/core/Scopes.scala | 144 +- .../src/dotty/tools/dotc/core/Signature.scala | 39 +- .../src/dotty/tools/dotc/core/StdNames.scala | 42 +- .../dotty/tools/dotc/core/Substituters.scala | 75 +- .../tools/dotc/core/SymDenotations.scala | 553 +++---- .../dotty/tools/dotc/core/SymbolLoaders.scala | 111 +- .../src/dotty/tools/dotc/core/Symbols.scala | 169 +- .../tools/dotc/core/TypeApplications.scala | 104 +- .../dotty/tools/dotc/core/TypeComparer.scala | 563 +++---- .../dotty/tools/dotc/core/TypeErasure.scala | 111 +- .../dotty/tools/dotc/core/TypeErrors.scala | 15 +- .../src/dotty/tools/dotc/core/TypeEval.scala | 3 +- .../src/dotty/tools/dotc/core/TypeOps.scala | 210 +-- .../dotty/tools/dotc/core/TyperState.scala | 17 +- .../src/dotty/tools/dotc/core/Types.scala | 1301 +++++---------- .../src/dotty/tools/dotc/core/Variances.scala | 6 +- .../core/classfile/AbstractFileReader.scala | 12 +- .../dotc/core/classfile/ByteCodecs.scala | 87 +- .../core/classfile/ClassfileConstants.scala | 21 +- .../dotc/core/classfile/ClassfileParser.scala | 426 
++--- .../dotc/core/classfile/DataReader.scala | 3 +- .../core/classfile/ReusableDataReader.scala | 91 +- .../dotc/core/tasty/CommentUnpickler.scala | 12 +- .../dotc/core/tasty/DottyUnpickler.scala | 18 +- .../tools/dotc/core/tasty/NameBuffer.scala | 39 +- .../dotc/core/tasty/PositionPickler.scala | 27 +- .../dotc/core/tasty/PositionUnpickler.scala | 27 +- .../dotc/core/tasty/TastyAnsiiPrinter.scala | 3 +- .../dotc/core/tasty/TastyClassName.scala | 24 +- .../dotc/core/tasty/TastyHTMLPrinter.scala | 3 +- .../tools/dotc/core/tasty/TastyPickler.scala | 18 +- .../tools/dotc/core/tasty/TastyPrinter.scala | 78 +- .../dotc/core/tasty/TastyUnpickler.scala | 27 +- .../tools/dotc/core/tasty/TreeBuffer.scala | 42 +- .../tools/dotc/core/tasty/TreePickler.scala | 246 +-- .../tools/dotc/core/tasty/TreeUnpickler.scala | 336 ++-- .../core/unpickleScala2/PickleBuffer.scala | 69 +- .../core/unpickleScala2/PickleFormat.scala | 3 +- .../core/unpickleScala2/Scala2Erasure.scala | 23 +- .../core/unpickleScala2/Scala2Flags.scala | 3 +- .../core/unpickleScala2/Scala2Unpickler.scala | 345 ++-- .../decompiler/DecompilationPrinter.scala | 15 +- .../dotc/decompiler/IDEDecompilerDriver.scala | 12 +- .../dotty/tools/dotc/decompiler/Main.scala | 9 +- .../decompiler/PartialTASTYDecompiler.scala | 3 +- .../dotc/decompiler/TASTYDecompiler.scala | 3 +- .../dotty/tools/dotc/fromtasty/Debug.scala | 17 +- .../tools/dotc/fromtasty/ReadTasty.scala | 30 +- .../dotc/fromtasty/TASTYCompilationUnit.scala | 3 +- .../tools/dotc/fromtasty/TASTYCompiler.scala | 6 +- .../dotty/tools/dotc/fromtasty/TASTYRun.scala | 6 +- .../tools/dotc/fromtasty/TastyFileUtil.scala | 9 +- .../tools/dotc/inlines/InlineReducer.scala | 132 +- .../dotty/tools/dotc/inlines/Inliner.scala | 167 +- .../dotty/tools/dotc/inlines/Inlines.scala | 45 +- .../dotc/inlines/PrepareInlineable.scala | 54 +- .../tools/dotc/interactive/Completion.scala | 57 +- .../tools/dotc/interactive/Interactive.scala | 86 +- .../interactive/InteractiveCompiler.scala 
| 3 +- .../dotc/interactive/InteractiveDriver.scala | 93 +- .../tools/dotc/interactive/SourceTree.scala | 30 +- .../tools/dotc/parsing/CharArrayReader.scala | 33 +- .../tools/dotc/parsing/JavaParsers.scala | 432 ++--- .../tools/dotc/parsing/JavaScanners.scala | 256 +-- .../dotty/tools/dotc/parsing/JavaTokens.scala | 3 +- .../tools/dotc/parsing/ParserPhase.scala | 15 +- .../dotty/tools/dotc/parsing/Parsers.scala | 844 ++++------ .../dotty/tools/dotc/parsing/Scanners.scala | 320 ++-- .../tools/dotc/parsing/ScriptParsers.scala | 6 +- .../src/dotty/tools/dotc/parsing/Tokens.scala | 19 +- .../dotty/tools/dotc/parsing/package.scala | 9 +- .../dotc/parsing/xml/MarkupParserCommon.scala | 59 +- .../dotc/parsing/xml/MarkupParsers.scala | 149 +- .../dotc/parsing/xml/SymbolicXMLBuilder.scala | 65 +- .../tools/dotc/parsing/xml/Utility.scala | 48 +- .../src/dotty/tools/dotc/plugins/Plugin.scala | 54 +- .../dotty/tools/dotc/plugins/Plugins.scala | 51 +- .../tools/dotc/printing/Formatting.scala | 18 +- .../tools/dotc/printing/Highlighting.scala | 18 +- .../tools/dotc/printing/PlainPrinter.scala | 105 +- .../dotty/tools/dotc/printing/Printer.scala | 9 +- .../tools/dotc/printing/RefinedPrinter.scala | 290 ++-- .../tools/dotc/printing/ReplPrinter.scala | 15 +- .../dotty/tools/dotc/printing/Showable.scala | 3 +- .../dotc/printing/SyntaxHighlighting.scala | 33 +- .../src/dotty/tools/dotc/printing/Texts.scala | 71 +- .../dotty/tools/dotc/printing/package.scala | 3 +- .../tools/dotc/profile/AsyncHelper.scala | 70 +- .../dotty/tools/dotc/profile/Profiler.scala | 81 +- .../dotty/tools/dotc/quoted/Interpreter.scala | 83 +- .../tools/dotc/quoted/MacroExpansion.scala | 3 +- .../tools/dotc/quoted/PickledQuotes.scala | 63 +- .../dotty/tools/dotc/quoted/QuoteUtils.scala | 12 +- .../dotty/tools/dotc/quoted/QuotesCache.scala | 3 +- .../dotty/tools/dotc/quoted/TastyString.scala | 9 +- .../dotc/quoted/reflect/FromSymbol.scala | 24 +- compiler/src/dotty/tools/dotc/report.scala | 10 +- 
.../dotc/reporting/ConsoleReporter.scala | 15 +- .../tools/dotc/reporting/Diagnostic.scala | 15 +- .../reporting/HideNonSensicalMessages.scala | 6 +- .../dotty/tools/dotc/reporting/Message.scala | 58 +- .../dotc/reporting/MessageRendering.scala | 51 +- .../dotty/tools/dotc/reporting/Reporter.scala | 52 +- .../tools/dotc/reporting/StoreReporter.scala | 6 +- .../dotc/reporting/ThrowingReporter.scala | 6 +- .../reporting/UniqueMessagePositions.scala | 3 +- .../dotty/tools/dotc/reporting/WConf.scala | 38 +- .../dotty/tools/dotc/reporting/messages.scala | 591 +++---- .../dotty/tools/dotc/reporting/trace.scala | 3 +- .../dotty/tools/dotc/rewrites/Rewrites.scala | 27 +- .../src/dotty/tools/dotc/sbt/APIUtils.scala | 15 +- .../src/dotty/tools/dotc/sbt/ExtractAPI.scala | 159 +- .../tools/dotc/sbt/ExtractDependencies.scala | 145 +- .../src/dotty/tools/dotc/sbt/ShowAPI.scala | 35 +- .../dotty/tools/dotc/sbt/ThunkHolder.scala | 9 +- .../tools/dotc/semanticdb/ConstantOps.scala | 3 +- .../tools/dotc/semanticdb/Descriptor.scala | 66 +- .../dotc/semanticdb/ExtractSemanticDB.scala | 13 +- .../dotty/tools/dotc/semanticdb/PPrint.scala | 41 +- .../dotty/tools/dotc/semanticdb/Scala3.scala | 11 +- .../dotty/tools/dotc/semanticdb/TypeOps.scala | 48 +- .../dotc/semanticdb/generated/Access.scala | 210 +-- .../semanticdb/generated/Annotation.scala | 33 +- .../dotc/semanticdb/generated/Constant.scala | 408 ++--- .../semanticdb/generated/Diagnostic.scala | 63 +- .../semanticdb/generated/Documentation.scala | 63 +- .../dotc/semanticdb/generated/Language.scala | 18 +- .../dotc/semanticdb/generated/Location.scala | 33 +- .../dotc/semanticdb/generated/Range.scala | 51 +- .../dotc/semanticdb/generated/Schema.scala | 18 +- .../dotc/semanticdb/generated/Scope.scala | 27 +- .../dotc/semanticdb/generated/Signature.scala | 198 +-- .../generated/SymbolInformation.scala | 198 +-- .../generated/SymbolOccurrence.scala | 57 +- .../dotc/semanticdb/generated/Synthetic.scala | 33 +- 
.../semanticdb/generated/TextDocument.scala | 57 +- .../semanticdb/generated/TextDocuments.scala | 27 +- .../dotc/semanticdb/generated/Tree.scala | 336 ++-- .../dotc/semanticdb/generated/Type.scala | 663 +++----- .../InvalidProtocolBufferException.scala | 33 +- .../dotc/semanticdb/internal/LiteParser.scala | 9 +- .../tools/dotc/semanticdb/internal/MD5.scala | 15 +- .../semanticdb/internal/SemanticdbEnum.scala | 3 +- .../SemanticdbGeneratedMessageCompanion.scala | 24 +- .../internal/SemanticdbInputStream.scala | 394 ++--- .../internal/SemanticdbMessage.scala | 3 +- .../internal/SemanticdbOutputStream.scala | 333 ++-- .../internal/SemanticdbTypeMapper.scala | 9 +- .../dotc/semanticdb/internal/WireFormat.scala | 3 +- .../tools/dotc/staging/CrossStageSafety.scala | 6 +- .../dotty/tools/dotc/staging/HealType.scala | 9 +- .../tools/dotc/staging/StagingLevel.scala | 3 +- .../dotc/staging/TreeMapWithStages.scala | 9 +- .../tools/dotc/transform/AccessProxies.scala | 53 +- .../tools/dotc/transform/ArrayApply.scala | 18 +- .../dotc/transform/ArrayConstructors.scala | 15 +- .../tools/dotc/transform/BeanProperties.scala | 7 +- .../dotty/tools/dotc/transform/Bridges.scala | 24 +- .../tools/dotc/transform/CapturedVars.scala | 45 +- .../dotc/transform/CheckNoSuperThis.scala | 29 +- .../tools/dotc/transform/CheckReentrant.scala | 21 +- .../tools/dotc/transform/CheckStatic.scala | 12 +- .../tools/dotc/transform/CheckUnused.scala | 22 +- .../dotc/transform/CollectEntryPoints.scala | 6 +- .../transform/CollectNullableFields.scala | 18 +- .../dotc/transform/CompleteJavaEnums.scala | 27 +- .../tools/dotc/transform/Constructors.scala | 88 +- .../tools/dotc/transform/CookComments.scala | 9 +- .../dotc/transform/CountOuterAccesses.scala | 3 +- .../dotty/tools/dotc/transform/CtxLazy.scala | 9 +- .../tools/dotc/transform/Dependencies.scala | 6 +- .../tools/dotc/transform/ElimByName.scala | 12 +- .../dotc/transform/ElimErasedValueType.scala | 35 +- .../tools/dotc/transform/ElimOpaque.scala | 
15 +- .../dotc/transform/ElimOuterSelect.scala | 6 +- .../dotc/transform/ElimPackagePrefixes.scala | 9 +- .../dotc/transform/ElimPolyFunction.scala | 18 +- .../tools/dotc/transform/ElimRepeated.scala | 10 +- .../tools/dotc/transform/ElimStaticThis.scala | 9 +- .../dotty/tools/dotc/transform/Erasure.scala | 179 +-- .../tools/dotc/transform/EtaReduce.scala | 2 +- .../tools/dotc/transform/ExpandPrivate.scala | 24 +- .../tools/dotc/transform/ExpandSAMs.scala | 21 +- .../tools/dotc/transform/ExplicitOuter.scala | 75 +- .../tools/dotc/transform/ExplicitSelf.scala | 9 +- .../dotc/transform/ExtensionMethods.scala | 37 +- .../tools/dotc/transform/FirstTransform.scala | 48 +- .../dotty/tools/dotc/transform/Flatten.scala | 6 +- .../dotc/transform/ForwardDepChecks.scala | 21 +- .../dotc/transform/FullParameterization.scala | 55 +- .../transform/FunctionXXLForwarders.scala | 12 +- .../dotc/transform/GenericSignatures.scala | 105 +- .../dotty/tools/dotc/transform/Getters.scala | 9 +- .../tools/dotc/transform/HoistSuperArgs.scala | 47 +- .../tools/dotc/transform/InlineVals.scala | 3 +- .../dotty/tools/dotc/transform/Inlining.scala | 15 +- .../dotc/transform/InstrumentCoverage.scala | 3 +- .../dotc/transform/Instrumentation.scala | 9 +- .../dotc/transform/InterceptedMethods.scala | 24 +- .../tools/dotc/transform/LambdaLift.scala | 63 +- .../dotty/tools/dotc/transform/LazyVals.scala | 93 +- .../dotty/tools/dotc/transform/LiftTry.scala | 3 +- .../tools/dotc/transform/MacroTransform.scala | 12 +- .../tools/dotc/transform/MegaPhase.scala | 486 ++---- .../dotty/tools/dotc/transform/Memoize.scala | 26 +- .../dotty/tools/dotc/transform/Mixin.scala | 38 +- .../dotty/tools/dotc/transform/MixinOps.scala | 17 +- .../tools/dotc/transform/MoveStatics.scala | 30 +- .../dotc/transform/NonLocalReturns.scala | 9 +- .../dotc/transform/OverridingPairs.scala | 18 +- .../tools/dotc/transform/PatternMatcher.scala | 318 ++-- .../tools/dotc/transform/PickleQuotes.scala | 51 +- 
.../dotty/tools/dotc/transform/Pickler.scala | 36 +- .../tools/dotc/transform/PostTyper.scala | 66 +- .../dotc/transform/ProtectedAccessors.scala | 21 +- .../dotc/transform/PruneErasedDefs.scala | 3 +- .../tools/dotc/transform/PureStats.scala | 9 +- .../dotty/tools/dotc/transform/Recheck.scala | 12 +- .../transform/RepeatableAnnotations.scala | 3 +- .../tools/dotc/transform/ResolveSuper.scala | 21 +- .../tools/dotc/transform/RestoreScopes.scala | 6 +- .../tools/dotc/transform/SelectStatic.scala | 12 +- .../tools/dotc/transform/SeqLiterals.scala | 9 +- .../tools/dotc/transform/SetRootTree.scala | 21 +- .../transform/SpecializeApplyMethods.scala | 24 +- .../dotc/transform/SpecializeFunctions.scala | 12 +- .../dotty/tools/dotc/transform/Splicer.scala | 41 +- .../dotty/tools/dotc/transform/Splicing.scala | 39 +- .../dotty/tools/dotc/transform/Staging.scala | 27 +- .../tools/dotc/transform/SuperAccessors.scala | 42 +- .../dotty/tools/dotc/transform/SymUtils.scala | 27 +- .../dotc/transform/SyntheticMembers.scala | 80 +- .../dotty/tools/dotc/transform/TailRec.scala | 78 +- .../dotc/transform/TransformWildcards.scala | 6 +- .../tools/dotc/transform/TreeChecker.scala | 164 +- .../tools/dotc/transform/TreeExtractors.scala | 21 +- .../dotc/transform/TryCatchPatterns.scala | 21 +- .../dotc/transform/TupleOptimizations.scala | 51 +- .../tools/dotc/transform/TypeTestsCasts.scala | 66 +- .../tools/dotc/transform/TypeUtils.scala | 21 +- .../dotc/transform/VCElideAllocations.scala | 9 +- .../dotc/transform/VCInlineMethods.scala | 9 +- .../tools/dotc/transform/ValueClasses.scala | 6 +- .../dotc/transform/YCheckPositions.scala | 18 +- .../tools/dotc/transform/init/Checker.scala | 3 +- .../tools/dotc/transform/init/Errors.scala | 3 +- .../tools/dotc/transform/init/Semantic.scala | 117 +- .../tools/dotc/transform/init/Trace.scala | 3 +- .../tools/dotc/transform/init/Util.scala | 9 +- .../transform/localopt/FormatChecker.scala | 9 +- .../localopt/StringInterpolatorOpt.scala | 4 +- 
.../tools/dotc/transform/patmat/Space.scala | 159 +- .../transform/sjs/AddLocalJSFakeNews.scala | 20 +- .../transform/sjs/ExplicitJSClasses.scala | 191 +-- .../dotc/transform/sjs/JSExportUtils.scala | 9 +- .../tools/dotc/transform/sjs/JSSymUtils.scala | 84 +- .../transform/sjs/JUnitBootstrappers.scala | 74 +- .../dotc/transform/sjs/PrepJSExports.scala | 179 +-- .../dotc/transform/sjs/PrepJSInterop.scala | 432 ++--- .../dotty/tools/dotc/typer/Applications.scala | 745 ++++----- .../src/dotty/tools/dotc/typer/Checking.scala | 287 ++-- .../dotty/tools/dotc/typer/ConstFold.scala | 23 +- .../tools/dotc/typer/CrossVersionChecks.scala | 24 +- .../src/dotty/tools/dotc/typer/Deriving.scala | 51 +- .../dotty/tools/dotc/typer/Docstrings.scala | 12 +- .../src/dotty/tools/dotc/typer/Dynamic.scala | 64 +- .../tools/dotc/typer/ErrorReporting.scala | 33 +- .../dotty/tools/dotc/typer/EtaExpansion.scala | 45 +- .../dotty/tools/dotc/typer/Implicits.scala | 275 ++-- .../dotty/tools/dotc/typer/ImportInfo.scala | 18 +- .../tools/dotc/typer/ImportSuggestions.scala | 15 +- .../dotty/tools/dotc/typer/Inferencing.scala | 162 +- .../dotty/tools/dotc/typer/JavaChecks.scala | 6 +- .../src/dotty/tools/dotc/typer/Namer.scala | 295 ++-- .../dotty/tools/dotc/typer/ProtoTypes.scala | 135 +- .../tools/dotc/typer/QuotesAndSplices.scala | 81 +- .../src/dotty/tools/dotc/typer/ReTyper.scala | 24 +- .../dotty/tools/dotc/typer/RefChecks.scala | 159 +- .../dotty/tools/dotc/typer/Synthesizer.scala | 70 +- .../dotty/tools/dotc/typer/TypeAssigner.scala | 101 +- .../src/dotty/tools/dotc/typer/Typer.scala | 892 ++++------- .../dotty/tools/dotc/typer/TyperPhase.scala | 21 +- .../tools/dotc/typer/VarianceChecker.scala | 60 +- .../dotty/tools/dotc/util/Attachment.scala | 51 +- .../src/dotty/tools/dotc/util/Chars.scala | 27 +- .../dotc/util/ClasspathFromClassloader.scala | 15 +- .../tools/dotc/util/CommentParsing.scala | 57 +- .../src/dotty/tools/dotc/util/DiffUtil.scala | 63 +- 
.../tools/dotc/util/FreshNameCreator.scala | 12 +- .../src/dotty/tools/dotc/util/HashSet.scala | 3 +- .../src/dotty/tools/dotc/util/LRUCache.scala | 30 +- .../tools/dotc/util/NameTransformer.scala | 65 +- .../dotty/tools/dotc/util/ParsedComment.scala | 30 +- .../src/dotty/tools/dotc/util/Property.scala | 3 +- .../tools/dotc/util/ReusableInstance.scala | 9 +- .../dotty/tools/dotc/util/ShowPickled.scala | 51 +- .../dotty/tools/dotc/util/Signatures.scala | 31 +- .../tools/dotc/util/SimpleIdentityMap.scala | 99 +- .../tools/dotc/util/SimpleIdentitySet.scala | 81 +- .../tools/dotc/util/SixteenNibbles.scala | 6 +- .../dotty/tools/dotc/util/SourceFile.scala | 54 +- .../tools/dotc/util/SourcePosition.scala | 12 +- .../src/dotty/tools/dotc/util/Spans.scala | 33 +- .../src/dotty/tools/dotc/util/Stats.scala | 18 +- .../src/dotty/tools/dotc/util/Store.scala | 15 +- compiler/src/dotty/tools/dotc/util/Util.scala | 6 +- .../dotty/tools/dotc/util/WeakHashSet.scala | 87 +- .../src/dotty/tools/dotc/util/common.scala | 3 +- .../dotty/tools/dotc/util/concurrent.scala | 15 +- .../src/dotty/tools/io/AbstractFile.scala | 44 +- compiler/src/dotty/tools/io/ClassPath.scala | 45 +- compiler/src/dotty/tools/io/Directory.scala | 12 +- compiler/src/dotty/tools/io/File.scala | 18 +- compiler/src/dotty/tools/io/Jar.scala | 60 +- compiler/src/dotty/tools/io/JarArchive.scala | 18 +- .../src/dotty/tools/io/NoAbstractFile.scala | 3 +- compiler/src/dotty/tools/io/Path.scala | 52 +- compiler/src/dotty/tools/io/PlainFile.scala | 40 +- compiler/src/dotty/tools/io/Streamable.scala | 30 +- .../src/dotty/tools/io/VirtualDirectory.scala | 15 +- compiler/src/dotty/tools/io/VirtualFile.scala | 21 +- compiler/src/dotty/tools/io/ZipArchive.scala | 110 +- compiler/src/dotty/tools/io/package.scala | 3 +- compiler/src/dotty/tools/package.scala | 6 +- .../tools/repl/AbstractFileClassLoader.scala | 21 +- .../tools/repl/CollectTopLevelImports.scala | 9 +- .../src/dotty/tools/repl/JLineTerminal.scala | 36 +- 
compiler/src/dotty/tools/repl/Main.scala | 3 +- .../src/dotty/tools/repl/ParseResult.scala | 70 +- compiler/src/dotty/tools/repl/Rendering.scala | 20 +- .../src/dotty/tools/repl/ReplCompiler.scala | 63 +- .../src/dotty/tools/repl/ReplDriver.scala | 139 +- .../src/dotty/tools/repl/ScriptEngine.scala | 21 +- compiler/src/dotty/tools/repl/package.scala | 3 +- compiler/src/dotty/tools/repl/results.scala | 3 +- .../src/dotty/tools/runner/ObjectRunner.scala | 9 +- .../dotty/tools/runner/ScalaClassLoader.scala | 15 +- compiler/src/dotty/tools/scripting/Main.scala | 12 +- .../scala/quoted/runtime/impl/ExprImpl.scala | 6 +- .../quoted/runtime/impl/QuoteMatcher.scala | 42 +- .../quoted/runtime/impl/QuotesImpl.scala | 42 +- .../quoted/runtime/impl/ScopeException.scala | 2 +- .../quoted/runtime/impl/SpliceScope.scala | 3 +- .../scala/quoted/runtime/impl/TypeImpl.scala | 6 +- .../runtime/impl/printers/Extractors.scala | 54 +- .../runtime/impl/printers/SourceCode.scala | 477 ++---- .../impl/printers/SyntaxHighlight.scala | 12 +- 456 files changed, 11912 insertions(+), 22369 deletions(-) diff --git a/compiler/src/dotty/tools/MainGenericCompiler.scala b/compiler/src/dotty/tools/MainGenericCompiler.scala index aa924a237f73..fcbdd4a66b3c 100644 --- a/compiler/src/dotty/tools/MainGenericCompiler.scala +++ b/compiler/src/dotty/tools/MainGenericCompiler.scala @@ -40,7 +40,7 @@ case class CompileSettings( compiler: Boolean = false, quiet: Boolean = false, colors: Boolean = false, -) { +): def withCompileMode(em: CompileMode): CompileSettings = this.compileMode match case CompileMode.Guess => this.copy(compileMode = em) @@ -83,9 +83,8 @@ case class CompileSettings( def withNoColors: CompileSettings = this.copy(colors = false) -} -object MainGenericCompiler { +object MainGenericCompiler: val classpathSeparator = File.pathSeparator @@ -185,4 +184,3 @@ object MainGenericCompiler { run(settings) end main -} diff --git a/compiler/src/dotty/tools/MainGenericRunner.scala 
b/compiler/src/dotty/tools/MainGenericRunner.scala index 6f4366a00b77..261c856ce301 100644 --- a/compiler/src/dotty/tools/MainGenericRunner.scala +++ b/compiler/src/dotty/tools/MainGenericRunner.scala @@ -42,7 +42,7 @@ case class Settings( modeShouldBePossibleRun: Boolean = false, modeShouldBeRun: Boolean = false, compiler: Boolean = false, -) { +): def withExecuteMode(em: ExecuteMode): Settings = this.executeMode match case ExecuteMode.Guess | ExecuteMode.PossibleRun => this.copy(executeMode = em) @@ -94,9 +94,8 @@ case class Settings( def withCompiler: Settings = this.copy(compiler = true) -} -object MainGenericRunner { +object MainGenericRunner: val classpathSeparator = File.pathSeparator @@ -212,7 +211,7 @@ object MainGenericRunner { case ExecuteMode.Run => val scalaClasspath = ClasspathFromClassloader(Thread.currentThread().getContextClassLoader).split(classpathSeparator) val newClasspath = (settings.classPath.flatMap(_.split(classpathSeparator).filter(_.nonEmpty)) ++ removeCompiler(scalaClasspath) :+ ".").map(File(_).toURI.toURL) - ObjectRunner.runAndCatch(newClasspath, settings.targetToRun, settings.residualArgs).flatMap { + ObjectRunner.runAndCatch(newClasspath, settings.targetToRun, settings.residualArgs).flatMap: case ex: ClassNotFoundException if ex.getMessage == settings.targetToRun => val file = settings.targetToRun Jar(file).mainClass match @@ -221,7 +220,6 @@ object MainGenericRunner { case None => Some(IllegalArgumentException(s"No main class defined in manifest in jar: $file")) case ex => Some(ex) - } case ExecuteMode.Script => val targetScript = Paths.get(settings.targetScript).toFile @@ -250,10 +248,9 @@ object MainGenericRunner { scripting.Main.process(properArgs.toArray) case ExecuteMode.Expression => - val cp = settings.classPath match { + val cp = settings.classPath match case Nil => "" case list => list.mkString(classpathSeparator) - } val cpArgs = if cp.isEmpty then Nil else List("-classpath", cp) val properArgs = cpArgs ++ 
settings.residualArgs ++ settings.scalaArgs val driver = StringDriver(properArgs.toArray, settings.targetExpression) @@ -280,4 +277,3 @@ object MainGenericRunner { def main(args: Array[String]): Unit = if (!process(args)) System.exit(1) -} diff --git a/compiler/src/dotty/tools/backend/ScalaPrimitivesOps.scala b/compiler/src/dotty/tools/backend/ScalaPrimitivesOps.scala index 6b5bfbc3e00e..cf87b9c3785d 100644 --- a/compiler/src/dotty/tools/backend/ScalaPrimitivesOps.scala +++ b/compiler/src/dotty/tools/backend/ScalaPrimitivesOps.scala @@ -3,7 +3,7 @@ package backend object ScalaPrimitivesOps extends ScalaPrimitivesOps -class ScalaPrimitivesOps { +class ScalaPrimitivesOps: // Arithmetic unary operations inline val POS = 1 // +x inline val NEG = 2 // -x @@ -165,68 +165,58 @@ class ScalaPrimitivesOps { def isArrayOp(code: Int): Boolean = isArrayNew(code) | isArrayLength(code) | isArrayGet(code) | isArraySet(code) - def isArrayNew(code: Int): Boolean = code match { + def isArrayNew(code: Int): Boolean = code match case NEW_ZARRAY | NEW_BARRAY | NEW_SARRAY | NEW_CARRAY | NEW_IARRAY | NEW_LARRAY | NEW_FARRAY | NEW_DARRAY | NEW_OARRAY => true case _ => false - } - def isArrayLength(code: Int): Boolean = code match { + def isArrayLength(code: Int): Boolean = code match case ZARRAY_LENGTH | BARRAY_LENGTH | SARRAY_LENGTH | CARRAY_LENGTH | IARRAY_LENGTH | LARRAY_LENGTH | FARRAY_LENGTH | DARRAY_LENGTH | OARRAY_LENGTH | LENGTH => true case _ => false - } - def isArrayGet(code: Int): Boolean = code match { + def isArrayGet(code: Int): Boolean = code match case ZARRAY_GET | BARRAY_GET | SARRAY_GET | CARRAY_GET | IARRAY_GET | LARRAY_GET | FARRAY_GET | DARRAY_GET | OARRAY_GET | APPLY => true case _ => false - } - def isArraySet(code: Int): Boolean = code match { + def isArraySet(code: Int): Boolean = code match case ZARRAY_SET | BARRAY_SET | SARRAY_SET | CARRAY_SET | IARRAY_SET | LARRAY_SET | FARRAY_SET | DARRAY_SET | OARRAY_SET | UPDATE => true case _ => false - } /** Check whether 
the given code is a comparison operator */ - def isComparisonOp(code: Int): Boolean = code match { + def isComparisonOp(code: Int): Boolean = code match case ID | NI | EQ | NE | LT | LE | GT | GE => true case _ => false - } def isUniversalEqualityOp(code: Int): Boolean = (code == EQ) || (code == NE) def isReferenceEqualityOp(code: Int): Boolean = (code == ID) || (code == NI) - def isArithmeticOp(code: Int): Boolean = code match { + def isArithmeticOp(code: Int): Boolean = code match case POS | NEG | NOT => true; // unary case ADD | SUB | MUL | DIV | MOD => true; // binary case OR | XOR | AND | LSL | LSR | ASR => true; // bitwise case _ => false - } - def isLogicalOp(code: Int): Boolean = code match { + def isLogicalOp(code: Int): Boolean = code match case ZNOT | ZAND | ZOR => true case _ => false - } - def isShiftOp(code: Int): Boolean = code match { + def isShiftOp(code: Int): Boolean = code match case LSL | LSR | ASR => true case _ => false - } - def isBitwiseOp(code: Int): Boolean = code match { + def isBitwiseOp(code: Int): Boolean = code match case OR | XOR | AND => true case _ => false - } def isCoercion(code: Int): Boolean = (code >= B2B) && (code <= D2D) -} diff --git a/compiler/src/dotty/tools/backend/WorklistAlgorithm.scala b/compiler/src/dotty/tools/backend/WorklistAlgorithm.scala index b3d98d425b2a..9f33af4badca 100644 --- a/compiler/src/dotty/tools/backend/WorklistAlgorithm.scala +++ b/compiler/src/dotty/tools/backend/WorklistAlgorithm.scala @@ -14,22 +14,20 @@ package backend * @version 1.0 * @see [[scala.tools.nsc.backend.icode.Linearizers]] */ -trait WorklistAlgorithm { +trait WorklistAlgorithm: type Elem - class WList { + class WList: private var list: List[Elem] = Nil def isEmpty = list.isEmpty def nonEmpty = !isEmpty def push(e: Elem): Unit = { list = e :: list } - def pop(): Elem = { + def pop(): Elem = val head = list.head list = list.tail head - } def pushAll(xs: Iterable[Elem]): Unit = xs.foreach(push) def clear(): Unit = list = Nil - } val 
worklist: WList @@ -38,12 +36,11 @@ trait WorklistAlgorithm { * The initializer is run once before the loop starts and should * initialize the worklist. */ - def run(initWorklist: => Unit) = { + def run(initWorklist: => Unit) = initWorklist while (worklist.nonEmpty) processElement(dequeue) - } /** * Process the current element from the worklist. @@ -54,4 +51,3 @@ trait WorklistAlgorithm { * Remove and return the first element to be processed from the worklist. */ def dequeue: Elem -} diff --git a/compiler/src/dotty/tools/backend/jvm/AsmUtils.scala b/compiler/src/dotty/tools/backend/jvm/AsmUtils.scala index e6393ce82054..51ebd19a8d76 100644 --- a/compiler/src/dotty/tools/backend/jvm/AsmUtils.scala +++ b/compiler/src/dotty/tools/backend/jvm/AsmUtils.scala @@ -9,7 +9,7 @@ import java.io.PrintWriter import scala.tools.asm.util.{TraceClassVisitor, TraceMethodVisitor, Textifier} import scala.tools.asm.ClassReader -object AsmUtils { +object AsmUtils: /** * Print the bytecode of methods generated by GenBCode to the standard output. 
Only methods @@ -33,7 +33,7 @@ object AsmUtils { inline val traceSerializedClassEnabled = false inline val traceSerializedClassPattern = "" - def traceMethod(mnode: MethodNode1): Unit = { + def traceMethod(mnode: MethodNode1): Unit = println(s"Bytecode for method ${mnode.name}") val p = new Textifier val tracer = new TraceMethodVisitor(p) @@ -41,25 +41,20 @@ object AsmUtils { val w = new PrintWriter(System.out) p.print(w) w.flush() - } - def traceClass(cnode: ClassNode1): Unit = { + def traceClass(cnode: ClassNode1): Unit = println(s"Bytecode for class ${cnode.name}") val w = new PrintWriter(System.out) cnode.accept(new TraceClassVisitor(w)) w.flush() - } def traceClass(bytes: Array[Byte]): Unit = traceClass(readClass(bytes)) - def readClass(bytes: Array[Byte]): ClassNode1 = { + def readClass(bytes: Array[Byte]): ClassNode1 = val node = new ClassNode1() new ClassReader(bytes).accept(node, 0) node - } - def instructionString(instruction: AbstractInsnNode): String = instruction.getOpcode match { + def instructionString(instruction: AbstractInsnNode): String = instruction.getOpcode match case -1 => instruction.toString case op => scala.tools.asm.util.Printer.OPCODES(op) - } -} diff --git a/compiler/src/dotty/tools/backend/jvm/BCodeAsmCommon.scala b/compiler/src/dotty/tools/backend/jvm/BCodeAsmCommon.scala index d95638be2695..46d14b47e06e 100644 --- a/compiler/src/dotty/tools/backend/jvm/BCodeAsmCommon.scala +++ b/compiler/src/dotty/tools/backend/jvm/BCodeAsmCommon.scala @@ -12,7 +12,7 @@ import dotty.tools.dotc.report * This trait contains code shared between GenBCode and GenASM that depends on types defined in * the compiler cake (Global). 
*/ -final class BCodeAsmCommon[I <: DottyBackendInterface](val interface: I) { +final class BCodeAsmCommon[I <: DottyBackendInterface](val interface: I): import interface.given import DottyBackendInterface.symExtensions @@ -22,16 +22,14 @@ final class BCodeAsmCommon[I <: DottyBackendInterface](val interface: I) { * It is also used to decide whether the "owner" field in the InnerClass attribute should be * null. */ - def isAnonymousOrLocalClass(classSym: Symbol): Boolean = { + def isAnonymousOrLocalClass(classSym: Symbol): Boolean = assert(classSym.isClass, s"not a class: $classSym") // Here used to be an `assert(!classSym.isDelambdafyFunction)`: delambdafy lambda classes are // always top-level. However, SI-8900 shows an example where the weak name-based implementation // of isDelambdafyFunction failed (for a function declared in a package named "lambda"). - classSym.isAnonymousClass || { + classSym.isAnonymousClass `||`: val originalOwner = classSym.originalOwner originalOwner != NoSymbol && !originalOwner.isClass - } - } /** * Returns the enclosing method for non-member classes. In the following example @@ -56,28 +54,24 @@ final class BCodeAsmCommon[I <: DottyBackendInterface](val interface: I) { * The EnclosingMethod attribute needs to be added to non-member classes (see doc in BTypes). * This is a source-level property, so we need to use the originalOwner chain to reconstruct it. */ - private def enclosingMethodForEnclosingMethodAttribute(classSym: Symbol): Option[Symbol] = { + private def enclosingMethodForEnclosingMethodAttribute(classSym: Symbol): Option[Symbol] = assert(classSym.isClass, classSym) - def enclosingMethod(sym: Symbol): Option[Symbol] = { + def enclosingMethod(sym: Symbol): Option[Symbol] = if (sym.isClass || sym == NoSymbol) None else if (sym.is(Method)) Some(sym) else enclosingMethod(sym.originalOwner) - } enclosingMethod(classSym.originalOwner) - } /** * The enclosing class for emitting the EnclosingMethod attribute. 
Since this is a source-level * property, this method looks at the originalOwner chain. See doc in BTypes. */ - private def enclosingClassForEnclosingMethodAttribute(classSym: Symbol): Symbol = { + private def enclosingClassForEnclosingMethodAttribute(classSym: Symbol): Symbol = assert(classSym.isClass, classSym) - def enclosingClass(sym: Symbol): Symbol = { + def enclosingClass(sym: Symbol): Symbol = if (sym.isClass) sym else enclosingClass(sym.originalOwner.originalLexicallyEnclosingClass) - } enclosingClass(classSym.originalOwner.originalLexicallyEnclosingClass) - } /*final*/ case class EnclosingMethodEntry(owner: String, name: String, methodDescriptor: String) @@ -89,70 +83,59 @@ final class BCodeAsmCommon[I <: DottyBackendInterface](val interface: I) { * symbol, and to obtain a method signature descriptor fro a method symbol. These function depend * on the implementation of GenASM / GenBCode, so they need to be passed in. */ - def enclosingMethodAttribute(classSym: Symbol, classDesc: Symbol => String, methodDesc: Symbol => String): Option[EnclosingMethodEntry] = { - if (isAnonymousOrLocalClass(classSym)) { + def enclosingMethodAttribute(classSym: Symbol, classDesc: Symbol => String, methodDesc: Symbol => String): Option[EnclosingMethodEntry] = + if (isAnonymousOrLocalClass(classSym)) val methodOpt = enclosingMethodForEnclosingMethodAttribute(classSym) report.debuglog(s"enclosing method for $classSym is $methodOpt (in ${methodOpt.map(_.enclosingClass)})") Some(EnclosingMethodEntry( classDesc(enclosingClassForEnclosingMethodAttribute(classSym)), methodOpt.map(_.javaSimpleName).orNull, methodOpt.map(methodDesc).orNull)) - } else { + else None - } - } -} -object BCodeAsmCommon{ - def ubytesToCharArray(bytes: Array[Byte]): Array[Char] = { +object BCodeAsmCommon: + def ubytesToCharArray(bytes: Array[Byte]): Array[Char] = val ca = new Array[Char](bytes.length) var idx = 0 - while(idx < bytes.length) { + while(idx < bytes.length) val b: Byte = bytes(idx) assert((b & 
~0x7f) == 0) ca(idx) = b.asInstanceOf[Char] idx += 1 - } ca - } - final def arrEncode(bSeven: Array[Byte]): Array[String] = { + final def arrEncode(bSeven: Array[Byte]): Array[String] = var strs: List[String] = Nil // chop into slices of at most 65535 bytes, counting 0x00 as taking two bytes (as per JVMS 4.4.7 The CONSTANT_Utf8_info Structure) var prevOffset = 0 var offset = 0 var encLength = 0 - while(offset < bSeven.length) { + while(offset < bSeven.length) val deltaEncLength = (if(bSeven(offset) == 0) 2 else 1) val newEncLength = encLength.toLong + deltaEncLength - if(newEncLength >= 65535) { + if(newEncLength >= 65535) val ba = bSeven.slice(prevOffset, offset) strs ::= new java.lang.String(ubytesToCharArray(ba)) encLength = 0 prevOffset = offset - } else { + else encLength += deltaEncLength offset += 1 - } - } - if(prevOffset < offset) { + if(prevOffset < offset) assert(offset == bSeven.length) val ba = bSeven.slice(prevOffset, offset) strs ::= new java.lang.String(ubytesToCharArray(ba)) - } assert(strs.size > 1, "encode instead as one String via strEncode()") // TODO too strict? 
strs.reverse.toArray - } - def strEncode(bSeven: Array[Byte]): String = { + def strEncode(bSeven: Array[Byte]): String = val ca = ubytesToCharArray(bSeven) new java.lang.String(ca) // debug val bvA = new asm.ByteVector; bvA.putUTF8(s) // debug val enc: Array[Byte] = scala.reflect.internal.pickling.ByteCodecs.encode(bytes) // debug assert(enc(idx) == bvA.getByte(idx + 2)) // debug assert(bvA.getLength == enc.size + 2) - } -} diff --git a/compiler/src/dotty/tools/backend/jvm/BCodeBodyBuilder.scala b/compiler/src/dotty/tools/backend/jvm/BCodeBodyBuilder.scala index e7b5a0dad1bf..e787b3b1b10c 100644 --- a/compiler/src/dotty/tools/backend/jvm/BCodeBodyBuilder.scala +++ b/compiler/src/dotty/tools/backend/jvm/BCodeBodyBuilder.scala @@ -32,7 +32,7 @@ import dotty.tools.dotc.report * @version 1.0 * */ -trait BCodeBodyBuilder extends BCodeSkelBuilder { +trait BCodeBodyBuilder extends BCodeSkelBuilder: // import global._ // import definitions._ import tpd._ @@ -46,7 +46,7 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { /* * Functionality to build the body of ASM MethodNode, except for `synchronized` and `try` expressions. */ - abstract class PlainBodyBuilder(cunit: CompilationUnit) extends PlainSkelBuilder(cunit) { + abstract class PlainBodyBuilder(cunit: CompilationUnit) extends PlainSkelBuilder(cunit): import Primitives.TestOp @@ -54,8 +54,8 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { def emit(opc: Int): Unit = { mnode.visitInsn(opc) } - def emitZeroOf(tk: BType): Unit = { - tk match { + def emitZeroOf(tk: BType): Unit = + tk match case BOOL => bc.boolconst(false) case BYTE | SHORT | @@ -66,25 +66,22 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { case DOUBLE => bc.dconst(0) case UNIT => () case _ => emit(asm.Opcodes.ACONST_NULL) - } - } /* * Emits code that adds nothing to the operand stack. * Two main cases: `tree` is an assignment, * otherwise an `adapt()` to UNIT is performed if needed. 
*/ - def genStat(tree: Tree): Unit = { + def genStat(tree: Tree): Unit = lineNumber(tree) - tree match { + tree match case Assign(lhs @ DesugaredSelect(qual, _), rhs) => val savedStackHeight = stackHeight val isStatic = lhs.symbol.isStaticMember - if (!isStatic) { + if (!isStatic) genLoadQualifier(lhs) stackHeight += 1 - } genLoad(rhs, symInfoTK(lhs.symbol)) stackHeight = savedStackHeight lineNumber(tree) @@ -96,7 +93,7 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { val s = lhs.symbol val Local(tk, _, idx, _) = locals.getOrMakeLocal(s) - rhs match { + rhs match case Apply(Select(larg: Ident, nme.ADD), Literal(x) :: Nil) if larg.symbol == s && tk.isIntSizedType && x.isShortRange => lineNumber(tree) @@ -111,192 +108,176 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { genLoad(rhs, tk) lineNumber(tree) bc.store(idx, tk) - } case _ => genLoad(tree, UNIT) - } - } /* Generate code for primitive arithmetic operations. */ - def genArithmeticOp(tree: Tree, code: Int): BType = tree match{ + def genArithmeticOp(tree: Tree, code: Int): BType = tree match case Apply(fun @ DesugaredSelect(larg, _), args) => - var resKind = tpeTK(larg) + var resKind = tpeTK(larg) - assert(resKind.isNumericType || (resKind == BOOL), + assert(resKind.isNumericType || (resKind == BOOL), s"$resKind is not a numeric or boolean type [operation: ${fun.symbol}]") - import ScalaPrimitivesOps._ + import ScalaPrimitivesOps._ - args match { + args match // unary operation - case Nil => - genLoad(larg, resKind) - code match { - case POS => () // nothing - case NEG => bc.neg(resKind) - case NOT => bc.genPrimitiveArithmetic(Primitives.NOT, resKind) - case _ => abort(s"Unknown unary operation: ${fun.symbol.showFullName} code: $code") - } + case Nil => + genLoad(larg, resKind) + code match + case POS => () // nothing + case NEG => bc.neg(resKind) + case NOT => bc.genPrimitiveArithmetic(Primitives.NOT, resKind) + case _ => abort(s"Unknown unary operation: ${fun.symbol.showFullName} code: $code") // 
binary operation - case rarg :: Nil => - val isShift = isShiftOp(code) - resKind = tpeTK(larg).maxType(if (isShift) INT else tpeTK(rarg)) + case rarg :: Nil => + val isShift = isShiftOp(code) + resKind = tpeTK(larg).maxType(if (isShift) INT else tpeTK(rarg)) - if (isShift || isBitwiseOp(code)) { - assert(resKind.isIntegralType || (resKind == BOOL), + if (isShift || isBitwiseOp(code)) + assert(resKind.isIntegralType || (resKind == BOOL), s"$resKind incompatible with arithmetic modulo operation.") - } - genLoad(larg, resKind) - stackHeight += resKind.size - genLoad(rarg, if (isShift) INT else resKind) - stackHeight -= resKind.size + genLoad(larg, resKind) + stackHeight += resKind.size + genLoad(rarg, if (isShift) INT else resKind) + stackHeight -= resKind.size - (code: @switch) match { - case ADD => bc add resKind - case SUB => bc sub resKind - case MUL => bc mul resKind - case DIV => bc div resKind - case MOD => bc rem resKind + (code: @switch) match + case ADD => bc add resKind + case SUB => bc sub resKind + case MUL => bc mul resKind + case DIV => bc div resKind + case MOD => bc rem resKind - case OR | XOR | AND => bc.genPrimitiveLogical(code, resKind) + case OR | XOR | AND => bc.genPrimitiveLogical(code, resKind) - case LSL | LSR | ASR => bc.genPrimitiveShift(code, resKind) + case LSL | LSR | ASR => bc.genPrimitiveShift(code, resKind) - case _ => abort(s"Unknown primitive: ${fun.symbol}[$code]") - } + case _ => abort(s"Unknown primitive: ${fun.symbol}[$code]") - case _ => - abort(s"Too many arguments for primitive function: $tree") - } - lineNumber(tree) - resKind - } + case _ => + abort(s"Too many arguments for primitive function: $tree") + lineNumber(tree) + resKind /* Generate primitive array operations. 
*/ - def genArrayOp(tree: Tree, code: Int, expectedType: BType): BType = tree match{ + def genArrayOp(tree: Tree, code: Int, expectedType: BType): BType = tree match case Apply(DesugaredSelect(arrayObj, _), args) => - import ScalaPrimitivesOps._ - val k = tpeTK(arrayObj) - genLoad(arrayObj, k) - val elementType = typeOfArrayOp.getOrElse[bTypes.BType](code, abort(s"Unknown operation on arrays: $tree code: $code")) + import ScalaPrimitivesOps._ + val k = tpeTK(arrayObj) + genLoad(arrayObj, k) + val elementType = typeOfArrayOp.getOrElse[bTypes.BType](code, abort(s"Unknown operation on arrays: $tree code: $code")) - var generatedType = expectedType + var generatedType = expectedType - if (isArrayGet(code)) { + if (isArrayGet(code)) // load argument on stack - assert(args.length == 1, s"Too many arguments for array get operation: $tree"); - stackHeight += 1 - genLoad(args.head, INT) - stackHeight -= 1 - generatedType = k.asArrayBType.componentType - bc.aload(elementType) - } - else if (isArraySet(code)) { - val List(a1, a2) = args - stackHeight += 1 - genLoad(a1, INT) - stackHeight += 1 - genLoad(a2) - stackHeight -= 2 - generatedType = UNIT - bc.astore(elementType) - } else { - generatedType = INT - emit(asm.Opcodes.ARRAYLENGTH) - } - lineNumber(tree) + assert(args.length == 1, s"Too many arguments for array get operation: $tree"); + stackHeight += 1 + genLoad(args.head, INT) + stackHeight -= 1 + generatedType = k.asArrayBType.componentType + bc.aload(elementType) + else if (isArraySet(code)) + val List(a1, a2) = args + stackHeight += 1 + genLoad(a1, INT) + stackHeight += 1 + genLoad(a2) + stackHeight -= 2 + generatedType = UNIT + bc.astore(elementType) + else + generatedType = INT + emit(asm.Opcodes.ARRAYLENGTH) + lineNumber(tree) - generatedType - } + generatedType - def genLoadIfTo(tree: If, expectedType: BType, dest: LoadDestination): BType = tree match{ + def genLoadIfTo(tree: If, expectedType: BType, dest: LoadDestination): BType = tree match case If(condp, 
thenp, elsep) => - val success = new asm.Label - val failure = new asm.Label - - val hasElse = !elsep.isEmpty && (elsep match { - case Literal(value) if value.tag == UnitTag => false - case _ => true - }) + val success = new asm.Label + val failure = new asm.Label - genCond(condp, success, failure, targetIfNoJump = success) - markProgramPoint(success) + val hasElse = !elsep.isEmpty && (elsep match { + case Literal(value) if value.tag == UnitTag => false + case _ => true + }) - if dest == LoadDestination.FallThrough then - if hasElse then - val thenKind = tpeTK(thenp) - val elseKind = tpeTK(elsep) - def hasUnitBranch = (thenKind == UNIT || elseKind == UNIT) && expectedType == UNIT - val resKind = if (hasUnitBranch) UNIT else tpeTK(tree) + genCond(condp, success, failure, targetIfNoJump = success) + markProgramPoint(success) - val postIf = new asm.Label - genLoadTo(thenp, resKind, LoadDestination.Jump(postIf, stackHeight)) - markProgramPoint(failure) - genLoadTo(elsep, resKind, LoadDestination.FallThrough) - markProgramPoint(postIf) - resKind + if dest == LoadDestination.FallThrough then + if hasElse then + val thenKind = tpeTK(thenp) + val elseKind = tpeTK(elsep) + def hasUnitBranch = (thenKind == UNIT || elseKind == UNIT) && expectedType == UNIT + val resKind = if (hasUnitBranch) UNIT else tpeTK(tree) + + val postIf = new asm.Label + genLoadTo(thenp, resKind, LoadDestination.Jump(postIf, stackHeight)) + markProgramPoint(failure) + genLoadTo(elsep, resKind, LoadDestination.FallThrough) + markProgramPoint(postIf) + resKind + else + genLoad(thenp, UNIT) + markProgramPoint(failure) + UNIT + end if else - genLoad(thenp, UNIT) + genLoadTo(thenp, expectedType, dest) markProgramPoint(failure) - UNIT + if hasElse then + genLoadTo(elsep, expectedType, dest) + else + genAdaptAndSendToDest(UNIT, expectedType, dest) + expectedType end if - else - genLoadTo(thenp, expectedType, dest) - markProgramPoint(failure) - if hasElse then - genLoadTo(elsep, expectedType, dest) - else - 
genAdaptAndSendToDest(UNIT, expectedType, dest) - expectedType - end if - } - def genPrimitiveOp(tree: Apply, expectedType: BType): BType = (tree: @unchecked) match { + def genPrimitiveOp(tree: Apply, expectedType: BType): BType = (tree: @unchecked) match case Apply(fun @ DesugaredSelect(receiver, _), _) => - val sym = tree.symbol + val sym = tree.symbol - val code = primitives.getPrimitive(tree, receiver.tpe) + val code = primitives.getPrimitive(tree, receiver.tpe) - import ScalaPrimitivesOps._ + import ScalaPrimitivesOps._ - if (isArithmeticOp(code)) genArithmeticOp(tree, code) - else if (code == CONCAT) genStringConcat(tree) - else if (code == HASH) genScalaHash(receiver) - else if (isArrayOp(code)) genArrayOp(tree, code, expectedType) - else if (isLogicalOp(code) || isComparisonOp(code)) { - val success, failure, after = new asm.Label - genCond(tree, success, failure, targetIfNoJump = success) + if (isArithmeticOp(code)) genArithmeticOp(tree, code) + else if (code == CONCAT) genStringConcat(tree) + else if (code == HASH) genScalaHash(receiver) + else if (isArrayOp(code)) genArrayOp(tree, code, expectedType) + else if (isLogicalOp(code) || isComparisonOp(code)) + val success, failure, after = new asm.Label + genCond(tree, success, failure, targetIfNoJump = success) // success block - markProgramPoint(success) - bc boolconst true - bc goTo after + markProgramPoint(success) + bc boolconst true + bc goTo after // failure block - markProgramPoint(failure) - bc boolconst false + markProgramPoint(failure) + bc boolconst false // after - markProgramPoint(after) + markProgramPoint(after) - BOOL - } - else if (isCoercion(code)) { - genLoad(receiver) - lineNumber(tree) - genCoercion(code) - coercionTo(code) - } - else abort( + BOOL + else if (isCoercion(code)) + genLoad(receiver) + lineNumber(tree) + genCoercion(code) + coercionTo(code) + else abort( s"Primitive operation not handled yet: ${sym.showFullName}(${fun.symbol.name}) at: ${tree.span}" - ) - } + ) - def 
genLoad(tree: Tree): Unit = { + def genLoad(tree: Tree): Unit = genLoad(tree, tpeTK(tree)) - } /* Generate code for trees that produce values on the stack */ def genLoad(tree: Tree, expectedType: BType): Unit = @@ -309,7 +290,7 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { lineNumber(tree) - tree match { + tree match case tree@ValDef(_, _, _) => val sym = tree.symbol /* most of the time, !locals.contains(sym), unless the current activation of genLoad() is being called @@ -320,9 +301,8 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { else { genLoad(tree.rhs, tk) } bc.store(idx, tk) val localVarStart = currProgramPoint() - if (!isSynth) { // there are case ValDef's emitted by patmat + if (!isSynth) // there are case ValDef's emitted by patmat varsInScope ::= (sym -> localVarStart) - } generatedType = UNIT case t @ If(_, _, _) => @@ -358,13 +338,12 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { val functionalInterface: Symbol = if !tpt.isEmpty then tpt.tpe.classSymbol else t.tpe.classSymbol - val (fun, args) = call match { + val (fun, args) = call match case Apply(fun, args) => (fun, args) case t @ DesugaredSelect(_, _) => (t, Nil) // TODO: use Select case t @ Ident(_) => (t, Nil) - } - if (!fun.symbol.isStaticMember) { + if (!fun.symbol.isStaticMember) // load receiver of non-static implementation of lambda // darkdimius: I haven't found in spec `this` reference should go @@ -373,7 +352,6 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { val DesugaredSelect(prefix, _) = fun: @unchecked genLoad(prefix) - } genLoadArguments(env, fun.symbol.info.firstParamTypes map toTypeKind) generatedType = genInvokeDynamicLambda(NoSymbol, fun.symbol, env.size, functionalInterface) @@ -385,19 +363,16 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { val symIsModuleClass = tree.symbol.is(ModuleClass) assert(tree.symbol == claszSymbol || symIsModuleClass, s"Trying to access the this of another class: tree.symbol = ${tree.symbol}, class symbol = $claszSymbol 
compilation unit: $cunit") - if (symIsModuleClass && tree.symbol != claszSymbol) { + if (symIsModuleClass && tree.symbol != claszSymbol) generatedType = genLoadModule(tree) - } - else { + else mnode.visitVarInsn(asm.Opcodes.ALOAD, 0) // When compiling Array.scala, the constructor invokes `Array.this.super.`. The expectedType // is `[Object` (computed by typeToBType, the type of This(Array) is `Array[T]`). If we would set // the generatedType to `Array` below, the call to adapt at the end would fail. The situation is // similar for primitives (`I` vs `Int`). - if (tree.symbol != defn.ArrayClass && !tree.symbol.isPrimitiveValueClass) { + if (tree.symbol != defn.ArrayClass && !tree.symbol.isPrimitiveValueClass) generatedType = classBTypeFromSymbol(claszSymbol) - } - } case DesugaredSelect(Ident(nme.EMPTY_PACKAGE), module) => assert(tree.symbol.is(Module), s"Selection of non-module from empty package: $tree sym: ${tree.symbol} at: ${tree.span}") @@ -412,16 +387,15 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { // receiverClass is used in the bytecode to access the field. 
using sym.owner may lead to IllegalAccessError def receiverClass = qualifier.tpe.typeSymbol - if (sym.is(Module)) { + if (sym.is(Module)) genLoadQualUnlessElidable() genLoadModule(tree) - } else if (sym.isStaticMember) { + else if (sym.isStaticMember) genLoadQualUnlessElidable() fieldLoad(sym, receiverClass) - } else { + else genLoadQualifier(tree) fieldLoad(sym, receiverClass) - } case t @ Ident(name) => val sym = tree.symbol @@ -429,23 +403,20 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { generatedType = tk val desugared = cachedDesugarIdent(t) - desugared match { + desugared match case None => - if (!sym.is(Package)) { + if (!sym.is(Package)) if (sym.is(Module)) genLoadModule(sym) else locals.load(sym) - } case Some(t) => genLoad(t, generatedType) - } case Literal(value) => - if (value.tag != UnitTag) (value.tag, expectedType) match { + if (value.tag != UnitTag) (value.tag, expectedType) match case (IntTag, LONG ) => bc.lconst(value.longValue); generatedType = LONG case (FloatTag, DOUBLE) => bc.dconst(value.doubleValue); generatedType = DOUBLE case (NullTag, _ ) => bc.emit(asm.Opcodes.ACONST_NULL); generatedType = srNullRef case _ => genConstant(value); generatedType = tpeTK(tree) - } case blck @ Block(stats, expr) => if(stats.isEmpty) @@ -480,7 +451,6 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { generatedType = genTypeApply(t) case _ => abort(s"Unexpected tree in genLoad: $tree/${tree.getClass} at: ${tree.span}") - } // emit conversion and send to the right destination if generatedDest == LoadDestination.FallThrough then @@ -531,7 +501,7 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { /* * must-single-thread */ - private def fieldOp(field: Symbol, isLoad: Boolean, specificReceiver: Symbol): Unit = { + private def fieldOp(field: Symbol, isLoad: Boolean, specificReceiver: Symbol): Unit = val useSpecificReceiver = specificReceiver != null && !field.isScalaStatic val owner = internalName(if (useSpecificReceiver) specificReceiver else 
field.owner) @@ -543,7 +513,6 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { else { if (isStatic) asm.Opcodes.PUTSTATIC else asm.Opcodes.PUTFIELD } mnode.visitFieldInsn(opc, owner, fieldJName, fieldDescr) - } // ---------------- emitting constant values ---------------- @@ -552,8 +521,8 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { * must-single-thread * Otherwise it's safe to call from multiple threads. */ - def genConstant(const: Constant): Unit = { - (const.tag/*: @switch*/) match { + def genConstant(const: Constant): Unit = + (const.tag/*: @switch*/) match case BooleanTag => bc.boolconst(const.booleanValue) @@ -588,35 +557,32 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { mnode.visitLdcInsn(tp.toASMType) case _ => abort(s"Unknown constant value: $const") - } - } - private def genLabeledTo(tree: Labeled, expectedType: BType, dest: LoadDestination): BType = tree match { + private def genLabeledTo(tree: Labeled, expectedType: BType, dest: LoadDestination): BType = tree match case Labeled(bind, expr) => - val labelSym = bind.symbol + val labelSym = bind.symbol - if dest == LoadDestination.FallThrough then - val resKind = tpeTK(tree) - val jumpTarget = new asm.Label - registerJumpDest(labelSym, resKind, LoadDestination.Jump(jumpTarget, stackHeight)) - genLoad(expr, resKind) - markProgramPoint(jumpTarget) - resKind - else - registerJumpDest(labelSym, expectedType, dest) - genLoadTo(expr, expectedType, dest) - expectedType - end if - } + if dest == LoadDestination.FallThrough then + val resKind = tpeTK(tree) + val jumpTarget = new asm.Label + registerJumpDest(labelSym, resKind, LoadDestination.Jump(jumpTarget, stackHeight)) + genLoad(expr, resKind) + markProgramPoint(jumpTarget) + resKind + else + registerJumpDest(labelSym, expectedType, dest) + genLoadTo(expr, expectedType, dest) + expectedType + end if - private def genReturn(r: Return): Unit = { + private def genReturn(r: Return): Unit = val expr: Tree = r.expr val fromSym: Symbol = if 
(r.from.symbol.is(LabelFlag)) r.from.symbol else NoSymbol - if (NoSymbol == fromSym) { + if (NoSymbol == fromSym) // return from enclosing method - cleanups match { + cleanups match case Nil => // not an assertion: !shouldEmitCleanup (at least not yet, pendingCleanups() may still have to run, and reset `shouldEmitCleanup`. genLoadTo(expr, returnType, LoadDestination.Return) @@ -624,17 +590,14 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { genLoad(expr, returnType) lineNumber(r) val saveReturnValue = (returnType != UNIT) - if (saveReturnValue) { + if (saveReturnValue) // regarding return value, the protocol is: in place of a `return-stmt`, a sequence of `adapt, store, jump` are inserted. - if (earlyReturnVar == null) { + if (earlyReturnVar == null) earlyReturnVar = locals.makeLocal(returnType, "earlyReturnVar", expr.tpe, expr.span) - } locals.store(earlyReturnVar) - } bc goTo nextCleanup shouldEmitCleanup = true - } - } else { + else // return from labeled assert(fromSym.is(LabelFlag), fromSym) assert(!fromSym.is(Method), fromSym) @@ -645,40 +608,38 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { */ val (exprExpectedType, exprDest) = findJumpDest(fromSym) genLoadTo(expr, exprExpectedType, exprDest) - } - } // end of genReturn() + // end of genReturn() - def genWhileDo(tree: WhileDo): LoadDestination = tree match{ + def genWhileDo(tree: WhileDo): LoadDestination = tree match case WhileDo(cond, body) => - val isInfinite = cond == tpd.EmptyTree + val isInfinite = cond == tpd.EmptyTree - val loop = new asm.Label - markProgramPoint(loop) + val loop = new asm.Label + markProgramPoint(loop) - if isInfinite then - val dest = LoadDestination.Jump(loop, stackHeight) - genLoadTo(body, UNIT, dest) - dest - else - body match - case Literal(value) if value.tag == UnitTag => + if isInfinite then + val dest = LoadDestination.Jump(loop, stackHeight) + genLoadTo(body, UNIT, dest) + dest + else + body match + case Literal(value) if value.tag == UnitTag => // this is the 
shape of do..while loops - val exitLoop = new asm.Label - genCond(cond, loop, exitLoop, targetIfNoJump = exitLoop) - markProgramPoint(exitLoop) - case _ => - val success = new asm.Label - val failure = new asm.Label - genCond(cond, success, failure, targetIfNoJump = success) - markProgramPoint(success) - genLoadTo(body, UNIT, LoadDestination.Jump(loop, stackHeight)) - markProgramPoint(failure) - end match - LoadDestination.FallThrough - } + val exitLoop = new asm.Label + genCond(cond, loop, exitLoop, targetIfNoJump = exitLoop) + markProgramPoint(exitLoop) + case _ => + val success = new asm.Label + val failure = new asm.Label + genCond(cond, success, failure, targetIfNoJump = success) + markProgramPoint(success) + genLoadTo(body, UNIT, LoadDestination.Jump(loop, stackHeight)) + markProgramPoint(failure) + end match + LoadDestination.FallThrough - def genTypeApply(t: TypeApply): BType = (t: @unchecked) match { + def genTypeApply(t: TypeApply): BType = (t: @unchecked) match case TypeApply(fun@DesugaredSelect(obj, _), targs) => val sym = fun.symbol @@ -693,59 +654,50 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { // TODO @lry make pattern match if (l.isPrimitive && r.isPrimitive) genConversion(l, r, cast) - else if (l.isPrimitive) { + else if (l.isPrimitive) bc drop l - if (cast) { + if (cast) mnode.visitTypeInsn(asm.Opcodes.NEW, jlClassCastExceptionRef.internalName) bc dup ObjectRef emit(asm.Opcodes.ATHROW) - } else { + else bc boolconst false - } - } - else if (r.isPrimitive && cast) { + else if (r.isPrimitive && cast) abort(s"Erasure should have added an unboxing operation to prevent this cast. 
Tree: $t") - } - else if (r.isPrimitive) { + else if (r.isPrimitive) bc isInstance boxedClassOfPrimitive(r.asPrimitiveBType) - } - else { + else assert(r.isRef, r) // ensure that it's not a method genCast(r.asRefBType, cast) - } if (cast) r else BOOL - } // end of genTypeApply() + // end of genTypeApply() - private def mkArrayConstructorCall(arr: ArrayBType, app: Apply, args: List[Tree]) = { + private def mkArrayConstructorCall(arr: ArrayBType, app: Apply, args: List[Tree]) = val dims = arr.dimension var elemKind = arr.elementType val argsSize = args.length - if (argsSize > dims) { + if (argsSize > dims) report.error(em"too many arguments for array constructor: found ${args.length} but array has only $dims dimension(s)", ctx.source.atSpan(app.span)) - } - if (argsSize < dims) { + if (argsSize < dims) /* In one step: * elemKind = new BType(BType.ARRAY, arr.off + argsSize, arr.len - argsSize) * however the above does not enter a TypeName for each nested arrays in chrs. */ for (i <- args.length until dims) elemKind = ArrayBType(elemKind) - } genLoadArguments(args, List.fill(args.size)(INT)) - (argsSize /*: @switch*/) match { + (argsSize /*: @switch*/) match case 1 => bc newarray elemKind case _ => val descr = ("[" * argsSize) + elemKind.descriptor // denotes the same as: arrayN(elemKind, argsSize).descriptor mnode.visitMultiANewArrayInsn(descr, argsSize) - } - } - private def genApply(app: Apply, expectedType: BType): BType = { + private def genApply(app: Apply, expectedType: BType): BType = var generatedType = expectedType lineNumber(app) - app match { + app match case Apply(_, args) if app.symbol eq defn.newArrayMethod => val List(elemClaz, Literal(c: Constant), ArrayValue(_, dims)) = args: @unchecked @@ -782,7 +734,7 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { generatedType = toTypeKind(tpt.tpe) assert(generatedType.isRef, s"Non reference type cannot be instantiated: $generatedType") - generatedType match { + generatedType match case arr: ArrayBType => 
mkArrayConstructorCall(arr, app, args) @@ -797,7 +749,6 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { case _ => abort(s"Cannot instantiate $tpt of kind: $generatedType") - } case Apply(fun, List(expr)) if Erasure.Boxing.isBox(fun.symbol) && fun.symbol.denot.owner != defn.UnitModuleClass => val nativeKind = tpeTK(expr) @@ -816,9 +767,9 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { case app @ Apply(fun, args) => val sym = fun.symbol - if (isPrimitive(fun)) { // primitive method call + if (isPrimitive(fun)) // primitive method call generatedType = genPrimitiveOp(app, expectedType) - } else { // normal method call + else // normal method call val invokeStyle = if (sym.isStaticMember) InvokeStyle.Static else if (sym.is(Private) || sym.isClassConstructor) InvokeStyle.Special @@ -834,7 +785,7 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { val DesugaredSelect(qual, name) = fun: @unchecked // fun is a Select, also checked in genLoadQualifier val isArrayClone = name == nme.clone_ && qual.tpe.widen.isInstanceOf[JavaArrayType] - if (isArrayClone) { + if (isArrayClone) // Special-case Array.clone, introduced in 36ef60e. The goal is to generate this call // as "[I.clone" instead of "java/lang/Object.clone". This is consistent with javac. // Arrays have a public method `clone` (jls 10.7). @@ -852,36 +803,31 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { val methodBType = asmMethodType(sym) bc.invokevirtual(target, sym.javaSimpleName, methodBType.descriptor) generatedType = methodBType.returnType - } else { - val receiverClass = if (!invokeStyle.isVirtual) null else { + else + val receiverClass = if (!invokeStyle.isVirtual) null else // receiverClass is used in the bytecode to as the method receiver. using sym.owner // may lead to IllegalAccessErrors, see 9954eaf / aladdin bug 455. 
val qualSym = qual.tpe.typeSymbol - if (qualSym == defn.ArrayClass) { + if (qualSym == defn.ArrayClass) // For invocations like `Array(1).hashCode` or `.wait()`, use Object as receiver // in the bytecode. Using the array descriptor (like we do for clone above) seems // to work as well, but it seems safer not to change this. Javac also uses Object. // Note that array apply/update/length are handled by isPrimitive (above). assert(sym.owner == defn.ObjectClass, s"unexpected array call: $app") defn.ObjectClass - } else qualSym - } + else qualSym generatedType = genCallMethod(sym, invokeStyle, app.span, receiverClass) - } - } - } generatedType - } // end of genApply() + // end of genApply() - private def genArrayValue(av: tpd.JavaSeqLiteral): BType = { + private def genArrayValue(av: tpd.JavaSeqLiteral): BType = val ArrayValue(tpt, elems) = av: @unchecked lineNumber(av) genArray(elems, tpt) - } - private def genArray(elems: List[Tree], elemType: Type): BType = { + private def genArray(elems: List[Tree], elemType: Type): BType = val elmKind = toTypeKind(elemType) val generatedType = ArrayBType(elmKind) @@ -892,37 +838,35 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { var i = 0 var rest = elems - while (!rest.isEmpty) { + while (!rest.isEmpty) bc dup generatedType bc iconst i genLoad(rest.head, elmKind) bc astore elmKind rest = rest.tail i = i + 1 - } stackHeight -= 3 generatedType - } /* A Match node contains one or more case clauses, each case clause lists one or more * Int/String values to use as keys, and a code block. The exception is the "default" case * clause which doesn't list any key (there is exactly one of these per match). 
*/ - private def genMatchTo(tree: Match, expectedType: BType, dest: LoadDestination): BType = tree match { + private def genMatchTo(tree: Match, expectedType: BType, dest: LoadDestination): BType = tree match case Match(selector, cases) => - lineNumber(tree) + lineNumber(tree) - val (generatedType, postMatch, postMatchDest) = - if dest == LoadDestination.FallThrough then - val postMatch = new asm.Label - (tpeTK(tree), postMatch, LoadDestination.Jump(postMatch, stackHeight)) - else - (expectedType, null, dest) + val (generatedType, postMatch, postMatchDest) = + if dest == LoadDestination.FallThrough then + val postMatch = new asm.Label + (tpeTK(tree), postMatch, LoadDestination.Jump(postMatch, stackHeight)) + else + (expectedType, null, dest) // Only two possible selector types exist in `Match` trees at this point: Int and String - if (tpeTK(selector) == INT) { + if (tpeTK(selector) == INT) /* On a first pass over the case clauses, we flatten the keys and their * targets (the latter represented with asm.Labels). That representation @@ -931,47 +875,43 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { * On a second pass, we emit the switch blocks, one for each different target. */ - var flatKeys: List[Int] = Nil - var targets: List[asm.Label] = Nil - var default: asm.Label = null - var switchBlocks: List[(asm.Label, Tree)] = Nil + var flatKeys: List[Int] = Nil + var targets: List[asm.Label] = Nil + var default: asm.Label = null + var switchBlocks: List[(asm.Label, Tree)] = Nil - genLoad(selector, INT) + genLoad(selector, INT) // collect switch blocks and their keys, but don't emit yet any switch-block. 
- for (caze @ CaseDef(pat, guard, body) <- cases) { - assert(guard == tpd.EmptyTree, guard) - val switchBlockPoint = new asm.Label - switchBlocks ::= (switchBlockPoint, body) - pat match { - case Literal(value) => - flatKeys ::= value.intValue - targets ::= switchBlockPoint - case Ident(nme.WILDCARD) => - assert(default == null, s"multiple default targets in a Match node, at ${tree.span}") - default = switchBlockPoint - case Alternative(alts) => - alts foreach { - case Literal(value) => - flatKeys ::= value.intValue - targets ::= switchBlockPoint - case _ => - abort(s"Invalid alternative in alternative pattern in Match node: $tree at: ${tree.span}") - } - case _ => - abort(s"Invalid pattern in Match node: $tree at: ${tree.span}") - } - } + for (caze @ CaseDef(pat, guard, body) <- cases) + assert(guard == tpd.EmptyTree, guard) + val switchBlockPoint = new asm.Label + switchBlocks ::= (switchBlockPoint, body) + pat match + case Literal(value) => + flatKeys ::= value.intValue + targets ::= switchBlockPoint + case Ident(nme.WILDCARD) => + assert(default == null, s"multiple default targets in a Match node, at ${tree.span}") + default = switchBlockPoint + case Alternative(alts) => + alts foreach: + case Literal(value) => + flatKeys ::= value.intValue + targets ::= switchBlockPoint + case _ => + abort(s"Invalid alternative in alternative pattern in Match node: $tree at: ${tree.span}") + case _ => + abort(s"Invalid pattern in Match node: $tree at: ${tree.span}") - bc.emitSWITCH(mkArrayReverse(flatKeys), mkArrayL(targets.reverse), default, MIN_SWITCH_DENSITY) + bc.emitSWITCH(mkArrayReverse(flatKeys), mkArrayL(targets.reverse), default, MIN_SWITCH_DENSITY) // emit switch-blocks. 
- for (sb <- switchBlocks.reverse) { - val (caseLabel, caseBody) = sb - markProgramPoint(caseLabel) - genLoadTo(caseBody, generatedType, postMatchDest) - } - } else { + for (sb <- switchBlocks.reverse) + val (caseLabel, caseBody) = sb + markProgramPoint(caseLabel) + genLoadTo(caseBody, generatedType, postMatchDest) + else /* Since the JVM doesn't have a way to switch on a string, we switch * on the `hashCode` of the string then do an `equals` check (with a @@ -981,60 +921,56 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { * This mirrors the way that Java compiles `switch` on Strings. */ - var default: asm.Label = null - var indirectBlocks: List[(asm.Label, Tree)] = Nil + var default: asm.Label = null + var indirectBlocks: List[(asm.Label, Tree)] = Nil // Cases grouped by their hashCode - val casesByHash = SortedMap.empty[Int, List[(String, Either[asm.Label, Tree])]] - var caseFallback: Tree = null - - for (caze @ CaseDef(pat, guard, body) <- cases) { - assert(guard == tpd.EmptyTree, guard) - pat match { - case Literal(value) => - val strValue = value.stringValue - casesByHash.updateWith(strValue.##) { existingCasesOpt => - val newCase = (strValue, Right(body)) - Some(newCase :: existingCasesOpt.getOrElse(Nil)) - } - case Ident(nme.WILDCARD) => - assert(default == null, s"multiple default targets in a Match node, at ${tree.span}") - default = new asm.Label - indirectBlocks ::= (default, body) - case Alternative(alts) => + val casesByHash = SortedMap.empty[Int, List[(String, Either[asm.Label, Tree])]] + var caseFallback: Tree = null + + for (caze @ CaseDef(pat, guard, body) <- cases) + assert(guard == tpd.EmptyTree, guard) + pat match + case Literal(value) => + val strValue = value.stringValue + casesByHash.updateWith(strValue.##) { existingCasesOpt => + val newCase = (strValue, Right(body)) + Some(newCase :: existingCasesOpt.getOrElse(Nil)) + } + case Ident(nme.WILDCARD) => + assert(default == null, s"multiple default targets in a Match node, at ${tree.span}") 
+ default = new asm.Label + indirectBlocks ::= (default, body) + case Alternative(alts) => // We need an extra basic block since multiple strings can lead to this code - val indirectCaseGroupLabel = new asm.Label - indirectBlocks ::= (indirectCaseGroupLabel, body) - alts foreach { - case Literal(value) => - val strValue = value.stringValue - casesByHash.updateWith(strValue.##) { existingCasesOpt => - val newCase = (strValue, Left(indirectCaseGroupLabel)) - Some(newCase :: existingCasesOpt.getOrElse(Nil)) - } - case _ => - abort(s"Invalid alternative in alternative pattern in Match node: $tree at: ${tree.span}") - } + val indirectCaseGroupLabel = new asm.Label + indirectBlocks ::= (indirectCaseGroupLabel, body) + alts foreach: + case Literal(value) => + val strValue = value.stringValue + casesByHash.updateWith(strValue.##) { existingCasesOpt => + val newCase = (strValue, Left(indirectCaseGroupLabel)) + Some(newCase :: existingCasesOpt.getOrElse(Nil)) + } + case _ => + abort(s"Invalid alternative in alternative pattern in Match node: $tree at: ${tree.span}") - case _ => - abort(s"Invalid pattern in Match node: $tree at: ${tree.span}") - } - } + case _ => + abort(s"Invalid pattern in Match node: $tree at: ${tree.span}") // Organize the hashCode options into switch cases - var flatKeys: List[Int] = Nil - var targets: List[asm.Label] = Nil - var hashBlocks: List[(asm.Label, List[(String, Either[asm.Label, Tree])])] = Nil - for ((hashValue, hashCases) <- casesByHash) { - val switchBlockPoint = new asm.Label - hashBlocks ::= (switchBlockPoint, hashCases) - flatKeys ::= hashValue - targets ::= switchBlockPoint - } + var flatKeys: List[Int] = Nil + var targets: List[asm.Label] = Nil + var hashBlocks: List[(asm.Label, List[(String, Either[asm.Label, Tree])])] = Nil + for ((hashValue, hashCases) <- casesByHash) + val switchBlockPoint = new asm.Label + hashBlocks ::= (switchBlockPoint, hashCases) + flatKeys ::= hashValue + targets ::= switchBlockPoint // Push the hashCode of 
the string (or `0` it is `null`) onto the stack and switch on it - genLoadIfTo( + genLoadIfTo( If( tree.selector.select(defn.Any_==).appliedTo(nullLiteral), Literal(Constant(0)), @@ -1042,73 +978,63 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { ), INT, LoadDestination.FallThrough - ) - bc.emitSWITCH(mkArrayReverse(flatKeys), mkArrayL(targets.reverse), default, MIN_SWITCH_DENSITY) + ) + bc.emitSWITCH(mkArrayReverse(flatKeys), mkArrayL(targets.reverse), default, MIN_SWITCH_DENSITY) // emit blocks for each hash case - for ((hashLabel, caseAlternatives) <- hashBlocks.reverse) { - markProgramPoint(hashLabel) - for ((caseString, indirectLblOrBody) <- caseAlternatives) { - val comparison = if (caseString == null) defn.Any_== else defn.Any_equals - val condp = Literal(Constant(caseString)).select(defn.Any_==).appliedTo(tree.selector) - val keepGoing = new asm.Label - indirectLblOrBody match { - case Left(jump) => - genCond(condp, jump, keepGoing, targetIfNoJump = keepGoing) - - case Right(caseBody) => - val thisCaseMatches = new asm.Label - genCond(condp, thisCaseMatches, keepGoing, targetIfNoJump = thisCaseMatches) - markProgramPoint(thisCaseMatches) - genLoadTo(caseBody, generatedType, postMatchDest) - } - markProgramPoint(keepGoing) - } - bc goTo default - } + for ((hashLabel, caseAlternatives) <- hashBlocks.reverse) + markProgramPoint(hashLabel) + for ((caseString, indirectLblOrBody) <- caseAlternatives) + val comparison = if (caseString == null) defn.Any_== else defn.Any_equals + val condp = Literal(Constant(caseString)).select(defn.Any_==).appliedTo(tree.selector) + val keepGoing = new asm.Label + indirectLblOrBody match + case Left(jump) => + genCond(condp, jump, keepGoing, targetIfNoJump = keepGoing) + + case Right(caseBody) => + val thisCaseMatches = new asm.Label + genCond(condp, thisCaseMatches, keepGoing, targetIfNoJump = thisCaseMatches) + markProgramPoint(thisCaseMatches) + genLoadTo(caseBody, generatedType, postMatchDest) + markProgramPoint(keepGoing) 
+ bc goTo default // emit blocks for common patterns - for ((caseLabel, caseBody) <- indirectBlocks.reverse) { - markProgramPoint(caseLabel) - genLoadTo(caseBody, generatedType, postMatchDest) - } - } + for ((caseLabel, caseBody) <- indirectBlocks.reverse) + markProgramPoint(caseLabel) + genLoadTo(caseBody, generatedType, postMatchDest) - if postMatch != null then - markProgramPoint(postMatch) - generatedType - } + if postMatch != null then + markProgramPoint(postMatch) + generatedType - def genBlockTo(tree: Block, expectedType: BType, dest: LoadDestination): Unit = tree match { + def genBlockTo(tree: Block, expectedType: BType, dest: LoadDestination): Unit = tree match case Block(stats, expr) => - val savedScope = varsInScope - varsInScope = Nil - stats foreach genStat - genLoadTo(expr, expectedType, dest) - emitLocalVarScopes() - varsInScope = savedScope - } + val savedScope = varsInScope + varsInScope = Nil + stats foreach genStat + genLoadTo(expr, expectedType, dest) + emitLocalVarScopes() + varsInScope = savedScope /** Add entries to the `LocalVariableTable` JVM attribute for all the vars in * `varsInScope`, ending at the current program point. */ def emitLocalVarScopes(): Unit = - if (emitVars) { + if (emitVars) val end = currProgramPoint() - for ((sym, start) <- varsInScope.reverse) { + for ((sym, start) <- varsInScope.reverse) emitLocalVarScope(sym, start, end) - } - } end emitLocalVarScopes - def adapt(from: BType, to: BType): Unit = { - if (!from.conformsTo(to)) { - to match { + def adapt(from: BType, to: BType): Unit = + if (!from.conformsTo(to)) + to match case UNIT => bc drop from case _ => bc.emitT2T(from, to) - } - } else if (from.isNothingType) { + else if (from.isNothingType) /* There are two possibilities for from.isNothingType: emitting a "throw e" expressions and * loading a (phantom) value of type Nothing. 
* @@ -1155,7 +1081,7 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { */ if (lastInsn.getOpcode != asm.Opcodes.ATHROW) emit(asm.Opcodes.ATHROW) - } else if (from.isNullType) { + else if (from.isNullType) /* After loading an expression of type `scala.runtime.Null$`, introduce POP; ACONST_NULL. * This is required to pass the verifier: in Scala's type system, Null conforms to any * reference type. In bytecode, the type Null is represented by scala.runtime.Null$, which @@ -1167,31 +1093,24 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { * In order to fix the above problem, the value returned by nl is dropped and ACONST_NULL is * inserted instead - after all, an expression of type scala.runtime.Null$ can only be null. */ - if (lastInsn.getOpcode != asm.Opcodes.ACONST_NULL) { + if (lastInsn.getOpcode != asm.Opcodes.ACONST_NULL) bc drop from emit(asm.Opcodes.ACONST_NULL) - } - } - else (from, to) match { + else (from, to) match case (BYTE, LONG) | (SHORT, LONG) | (CHAR, LONG) | (INT, LONG) => bc.emitT2T(INT, LONG) case _ => () - } - } /* Emit code to Load the qualifier of `tree` on top of the stack. 
*/ - def genLoadQualifier(tree: Tree): Unit = { + def genLoadQualifier(tree: Tree): Unit = lineNumber(tree) - tree match { + tree match case DesugaredSelect(qualifier, _) => genLoad(qualifier) case t: Ident => // dotty specific - cachedDesugarIdent(t) match { + cachedDesugarIdent(t) match case Some(sel) => genLoadQualifier(sel) case None => assert(t.symbol.owner == this.claszSymbol) - } case _ => abort(s"Unknown qualifier $tree") - } - } def genLoadArguments(args: List[Tree], btpes: List[BType]): Unit = @tailrec def loop(args: List[Tree], btpes: List[BType]): Unit = @@ -1210,7 +1129,7 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { stackHeight = savedStackHeight end genLoadArguments - def genLoadModule(tree: Tree): BType = { + def genLoadModule(tree: Tree): BType = val module = ( if (!tree.symbol.is(PackageClass)) tree.symbol else tree.symbol.info.member(nme.PACKAGE).symbol match { @@ -1221,13 +1140,12 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { lineNumber(tree) genLoadModule(module) symInfoTK(module) - } - def genLoadModule(module: Symbol): Unit = { + def genLoadModule(module: Symbol): Unit = def inStaticMethod = methSymbol != null && methSymbol.isStaticMember - if (claszSymbol == module.moduleClass && jMethodName != "readResolve" && !inStaticMethod) { + if (claszSymbol == module.moduleClass && jMethodName != "readResolve" && !inStaticMethod) mnode.visitVarInsn(asm.Opcodes.ALOAD, 0) - } else { + else val mbt = symInfoTK(module).asClassBType mnode.visitFieldInsn( asm.Opcodes.GETSTATIC, @@ -1235,47 +1153,39 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { str.MODULE_INSTANCE_FIELD, mbt.descriptor // for nostalgics: toTypeKind(module.tpe).descriptor ) - } - } - def genConversion(from: BType, to: BType, cast: Boolean): Unit = { + def genConversion(from: BType, to: BType, cast: Boolean): Unit = if (cast) { bc.emitT2T(from, to) } - else { + else bc drop from bc boolconst (from == to) - } - } - def genCast(to: RefBType, cast: Boolean): Unit = { + def 
genCast(to: RefBType, cast: Boolean): Unit = if (cast) { bc checkCast to } else { bc isInstance to } - } /* Is the given symbol a primitive operation? */ - def isPrimitive(fun: Tree): Boolean = { + def isPrimitive(fun: Tree): Boolean = primitives.isPrimitive(fun) - } /* Generate coercion denoted by "code" */ - def genCoercion(code: Int): Unit = { + def genCoercion(code: Int): Unit = import ScalaPrimitivesOps._ - (code: @switch) match { + (code: @switch) match case B2B | S2S | C2C | I2I | L2L | F2F | D2D => () case _ => val from = coercionFrom(code) val to = coercionTo(code) bc.emitT2T(from, to) - } - } /* Generate string concatenation * * On JDK 8: create and append using `StringBuilder` * On JDK 9+: use `invokedynamic` with `StringConcatFactory` */ - def genStringConcat(tree: Tree): BType = { + def genStringConcat(tree: Tree): BType = lineNumber(tree) - liftStringConcat(tree) match { + liftStringConcat(tree) match // Optimization for expressions of the form "" + x case List(Literal(Constant("")), arg) => genLoad(arg, ObjectRef) @@ -1283,21 +1193,19 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { case concatenations => val concatArguments = concatenations.view - .filter { + .filter: case Literal(Constant("")) => false // empty strings are no-ops in concatenation case _ => true - } - .map { + .map: case Apply(boxOp, value :: Nil) if Erasure.Boxing.isBox(boxOp.symbol) && boxOp.symbol.denot.owner != defn.UnitModuleClass => // Eliminate boxing of primitive values. Boxing is introduced by erasure because // there's only a single synthetic `+` method "added" to the string class. 
value case other => other - } .toList // `StringConcatFactory` only got added in JDK 9, so use `StringBuilder` for lower - if (backendUtils.classfileVersion < asm.Opcodes.V9) { + if (backendUtils.classfileVersion < asm.Opcodes.V9) // Estimate capacity needed for the string builder val approxBuilderSize = concatArguments.view.map { @@ -1308,15 +1216,14 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { bc.genNewStringBuilder(approxBuilderSize) stackHeight += 1 // during the genLoad below, there is a reference to the StringBuilder on the stack - for (elem <- concatArguments) { + for (elem <- concatArguments) val elemType = tpeTK(elem) genLoad(elem, elemType) bc.genStringBuilderAppend(elemType) - } stackHeight -= 1 bc.genStringBuilderEnd - } else { + else /* `StringConcatFactory#makeConcatWithConstants` accepts max 200 argument slots. If * the string concatenation is longer (unlikely), we spill into multiple calls @@ -1333,12 +1240,12 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { val savedStackHeight = stackHeight - for (elem <- concatArguments) { + for (elem <- concatArguments) val tpe = tpeTK(elem) val elemSlots = tpe.size // Unlikely spill case - if (totalArgSlots + elemSlots >= MaxIndySlots) { + if (totalArgSlots + elemSlots >= MaxIndySlots) stackHeight = savedStackHeight + countConcats bc.genIndyStringConcat(recipe.toString, argTypes.result(), constVals.result()) countConcats += 1 @@ -1346,17 +1253,15 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { recipe.setLength(0) argTypes.clear() constVals.clear() - } - elem match { + elem match case Literal(Constant(s: String)) => - if (s.contains(TagArg) || s.contains(TagConst)) { + if (s.contains(TagArg) || s.contains(TagConst)) totalArgSlots += elemSlots recipe.append(TagConst) constVals += s - } else { + else recipe.append(s) - } case other => totalArgSlots += elemSlots @@ -1365,34 +1270,28 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { argTypes += tpe.toASMType genLoad(elem, tpe) stackHeight += 1 - 
} - } stackHeight = savedStackHeight bc.genIndyStringConcat(recipe.toString, argTypes.result(), constVals.result()) // If we spilled, generate one final concat - if (countConcats > 1) { + if (countConcats > 1) bc.genIndyStringConcat( TagArg.toString * countConcats, Seq.fill(countConcats)(StringRef.toASMType), Seq.empty ) - } - } - } StringRef - } /** * Generate a method invocation. If `specificReceiver != null`, it is used as receiver in the * invocation instruction, otherwise `method.owner`. A specific receiver class is needed to * prevent an IllegalAccessError, (aladdin bug 455). */ - def genCallMethod(method: Symbol, style: InvokeStyle, pos: Span = NoSpan, specificReceiver: Symbol = null): BType = { + def genCallMethod(method: Symbol, style: InvokeStyle, pos: Span = NoSpan, specificReceiver: Symbol = null): BType = val methodOwner = method.owner // the class used in the invocation's method descriptor in the classfile - val receiverClass = { + val receiverClass = if (specificReceiver != null) assert(style.isVirtual || specificReceiver == methodOwner, s"specificReceiver can only be specified for virtual calls. $method - $specificReceiver") @@ -1412,17 +1311,14 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { // - using U results in a `NoSuchMethodError: U.clone. This is the JVM bug. 
// Note that a mixin forwarder is generated, so the correct method is executed in the end: // class C { override def clone(): Object = super[T].clone() } - val isTraitMethodOverridingObjectMember = { + val isTraitMethodOverridingObjectMember = receiver != methodOwner && // fast path - the boolean is used to pick either of these two, if they are the same it does not matter style.isVirtual && isEmittedInterface(receiver) && - defn.ObjectType.decl(method.name).symbol.exists && { // fast path - compute overrideChain on the next line only if necessary + defn.ObjectType.decl(method.name).symbol.exists `&&`: // fast path - compute overrideChain on the next line only if necessary val syms = method.allOverriddenSymbols.toList !syms.isEmpty && syms.last.owner == defn.ObjectClass - } - } if (isTraitMethodOverridingObjectMember) methodOwner else receiver - } receiverClass.info // ensure types the type is up to date; erasure may add lateINTERFACE to traits val receiverName = internalName(receiverClass) @@ -1433,40 +1329,36 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { val isInterface = isEmittedInterface(receiverClass) import InvokeStyle._ - if (style == Super) { - if (isInterface && !method.is(JavaDefined)) { + if (style == Super) + if (isInterface && !method.is(JavaDefined)) val args = new Array[BType](bmType.argumentTypes.length + 1) val ownerBType = toTypeKind(method.owner.info) bmType.argumentTypes.copyToArray(args, 1) val staticDesc = MethodBType(ownerBType :: bmType.argumentTypes, bmType.returnType).descriptor val staticName = traitSuperAccessorName(method) bc.invokestatic(receiverName, staticName, staticDesc, isInterface) - } else { + else bc.invokespecial(receiverName, jname, mdescr, isInterface) - } - } else { - val opc = style match { + else + val opc = style match case Static => Opcodes.INVOKESTATIC case Special => Opcodes.INVOKESPECIAL case Virtual => if (isInterface) Opcodes.INVOKEINTERFACE else Opcodes.INVOKEVIRTUAL - } bc.emitInvoke(opc, receiverName, 
jname, mdescr, isInterface) - } bmType.returnType - } // end of genCallMethod() + // end of genCallMethod() /* Generate the scala ## method. */ - def genScalaHash(tree: Tree): BType = { + def genScalaHash(tree: Tree): BType = genLoad(tree, ObjectRef) genCallMethod(NoSymbol, InvokeStyle.Static) // used to dispatch ## on primitives to ScalaRuntime.hash. Should be implemented by a miniphase - } /* * Returns a list of trees that each should be concatenated, from left to right. * It turns a chained call like "a".+("b").+("c") into a list of arguments. */ - def liftStringConcat(tree: Tree): List[Tree] = tree match { + def liftStringConcat(tree: Tree): List[Tree] = tree match case tree @ Apply(fun @ DesugaredSelect(larg, method), rarg) => if (isPrimitive(fun) && primitives.getPrimitive(tree, larg.tpe) == ScalaPrimitivesOps.CONCAT) @@ -1475,45 +1367,39 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { tree :: Nil case _ => tree :: Nil - } /* Emit code to compare the two top-most stack values using the 'op' operator. 
*/ - private def genCJUMP(success: asm.Label, failure: asm.Label, op: TestOp, tk: BType, targetIfNoJump: asm.Label, negated: Boolean = false): Unit = { + private def genCJUMP(success: asm.Label, failure: asm.Label, op: TestOp, tk: BType, targetIfNoJump: asm.Label, negated: Boolean = false): Unit = if (targetIfNoJump == success) genCJUMP(failure, success, op.negate(), tk, targetIfNoJump, negated = !negated) - else { - if (tk.isIntSizedType) { // BOOL, BYTE, CHAR, SHORT, or INT + else + if (tk.isIntSizedType) // BOOL, BYTE, CHAR, SHORT, or INT bc.emitIF_ICMP(op, success) - } else if (tk.isRef) { // REFERENCE(_) | ARRAY(_) + else if (tk.isRef) // REFERENCE(_) | ARRAY(_) bc.emitIF_ACMP(op, success) - } else { + else import Primitives._ def useCmpG = if (negated) op == GT || op == GE else op == LT || op == LE - (tk: @unchecked) match { + (tk: @unchecked) match case LONG => emit(asm.Opcodes.LCMP) case FLOAT => emit(if (useCmpG) asm.Opcodes.FCMPG else asm.Opcodes.FCMPL) case DOUBLE => emit(if (useCmpG) asm.Opcodes.DCMPG else asm.Opcodes.DCMPL) - } bc.emitIF(op, success) - } if (targetIfNoJump != failure) bc goTo failure - } - } /* Emits code to compare (and consume) stack-top and zero using the 'op' operator */ - private def genCZJUMP(success: asm.Label, failure: asm.Label, op: TestOp, tk: BType, targetIfNoJump: asm.Label, negated: Boolean = false): Unit = { + private def genCZJUMP(success: asm.Label, failure: asm.Label, op: TestOp, tk: BType, targetIfNoJump: asm.Label, negated: Boolean = false): Unit = import Primitives._ if (targetIfNoJump == success) genCZJUMP(failure, success, op.negate(), tk, targetIfNoJump, negated = !negated) - else { - if (tk.isIntSizedType) { // BOOL, BYTE, CHAR, SHORT, or INT + else + if (tk.isIntSizedType) // BOOL, BYTE, CHAR, SHORT, or INT bc.emitIF(op, success) - } else if (tk.isRef) { // REFERENCE(_) | ARRAY(_) - (op: @unchecked) match { // references are only compared with EQ and NE + else if (tk.isRef) // REFERENCE(_) | ARRAY(_) + (op: 
@unchecked) match // references are only compared with EQ and NE case EQ => bc emitIFNULL success case NE => bc emitIFNONNULL success - } - } else { + else def useCmpG = if (negated) op == GT || op == GE else op == LT || op == LE - (tk: @unchecked) match { + (tk: @unchecked) match case LONG => emit(asm.Opcodes.LCONST_0) emit(asm.Opcodes.LCMP) @@ -1523,14 +1409,10 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { case DOUBLE => emit(asm.Opcodes.DCONST_0) emit(if (useCmpG) asm.Opcodes.DCMPG else asm.Opcodes.DCMPL) - } bc.emitIF(op, success) - } if (targetIfNoJump != failure) bc goTo failure - } - } - def testOpForPrimitive(primitiveCode: Int) = (primitiveCode: @switch) match { + def testOpForPrimitive(primitiveCode: Int) = (primitiveCode: @switch) match case ScalaPrimitivesOps.ID => Primitives.EQ case ScalaPrimitivesOps.NI => Primitives.NE case ScalaPrimitivesOps.EQ => Primitives.EQ @@ -1539,43 +1421,38 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { case ScalaPrimitivesOps.LE => Primitives.LE case ScalaPrimitivesOps.GT => Primitives.GT case ScalaPrimitivesOps.GE => Primitives.GE - } /* * Generate code for conditional expressions. * The jump targets success/failure of the test are `then-target` and `else-target` resp. 
*/ - private def genCond(tree: Tree, success: asm.Label, failure: asm.Label, targetIfNoJump: asm.Label): Unit = { + private def genCond(tree: Tree, success: asm.Label, failure: asm.Label, targetIfNoJump: asm.Label): Unit = - def genComparisonOp(l: Tree, r: Tree, code: Int): Unit = { + def genComparisonOp(l: Tree, r: Tree, code: Int): Unit = val op = testOpForPrimitive(code) - def isNull(t: Tree): Boolean = t match { + def isNull(t: Tree): Boolean = t match case Literal(Constant(null)) => true case _ => false - } def ifOneIsNull(l: Tree, r: Tree): Tree = if (isNull(l)) r else if (isNull(r)) l else null val nonNullSide = if (ScalaPrimitivesOps.isReferenceEqualityOp(code)) ifOneIsNull(l, r) else null - if (nonNullSide != null) { + if (nonNullSide != null) // special-case reference (in)equality test for null (null eq x, x eq null) genLoad(nonNullSide, ObjectRef) genCZJUMP(success, failure, op, ObjectRef, targetIfNoJump) - } else { + else val tk = tpeTK(l).maxType(tpeTK(r)) genLoad(l, tk) stackHeight += tk.size genLoad(r, tk) stackHeight -= tk.size genCJUMP(success, failure, op, tk, targetIfNoJump) - } - } - def loadAndTestBoolean() = { + def loadAndTestBoolean() = genLoad(tree, BOOL) genCZJUMP(success, failure, Primitives.NE, BOOL, targetIfNoJump) - } lineNumber(tree) - tree match { + tree match case tree @ Apply(fun, args) if primitives.isPrimitive(fun.symbol) => import ScalaPrimitivesOps.{ ZNOT, ZAND, ZOR, EQ } @@ -1584,7 +1461,7 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { lazy val DesugaredSelect(lhs, _) = fun: @unchecked val rhs = if (args.isEmpty) tpd.EmptyTree else args.head // args.isEmpty only for ZNOT - def genZandOrZor(and: Boolean): Unit = { + def genZandOrZor(and: Boolean): Unit = // reaching "keepGoing" indicates the rhs should be evaluated too (ie not short-circuited). 
val keepGoing = new asm.Label @@ -1593,22 +1470,20 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { markProgramPoint(keepGoing) genCond(rhs, success, failure, targetIfNoJump) - } - primitives.getPrimitive(fun.symbol) match { + primitives.getPrimitive(fun.symbol) match case ZNOT => genCond(lhs, failure, success, targetIfNoJump) case ZAND => genZandOrZor(and = true) case ZOR => genZandOrZor(and = false) case code => - if (ScalaPrimitivesOps.isUniversalEqualityOp(code) && tpeTK(lhs).isClass) { + if (ScalaPrimitivesOps.isUniversalEqualityOp(code) && tpeTK(lhs).isClass) // rewrite `==` to null tests and `equals`. not needed for arrays (`equals` is reference equality). if (code == EQ) genEqEqPrimitive(lhs, rhs, success, failure, targetIfNoJump) else genEqEqPrimitive(lhs, rhs, failure, success, targetIfNoJump) - } else if (ScalaPrimitivesOps.isComparisonOp(code)) { + else if (ScalaPrimitivesOps.isComparisonOp(code)) genComparisonOp(lhs, rhs, code) - } else + else loadAndTestBoolean() - } case Block(stats, expr) => /* Push the decision further down the `expr`. @@ -1631,9 +1506,8 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { genCond(elsep, success, failure, targetIfNoJump) case _ => loadAndTestBoolean() - } - } // end of genCond() + // end of genCond() /* * Generate the "==" code for object references. It is equivalent of @@ -1642,7 +1516,7 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { * @param l left-hand-side of the '==' * @param r right-hand-side of the '==' */ - def genEqEqPrimitive(l: Tree, r: Tree, success: asm.Label, failure: asm.Label, targetIfNoJump: asm.Label): Unit = { + def genEqEqPrimitive(l: Tree, r: Tree, success: asm.Label, failure: asm.Label, targetIfNoJump: asm.Label): Unit = /* True if the equality comparison is between values that require the use of the rich equality * comparator (scala.runtime.Comparator.equals). 
This is the case when either side of the @@ -1650,33 +1524,29 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { * When it is statically known that both sides are equal and subtypes of Number of Character, * not using the rich equality is possible (their own equals method will do ok.) */ - val mustUseAnyComparator: Boolean = { + val mustUseAnyComparator: Boolean = val areSameFinals = l.tpe.typeSymbol.is(Final) && r.tpe.typeSymbol.is(Final) && (l.tpe =:= r.tpe) // todo: remove - def isMaybeBoxed(sym: Symbol): Boolean = { + def isMaybeBoxed(sym: Symbol): Boolean = (sym == defn.ObjectClass) || (sym == defn.JavaSerializableClass) || (sym == defn.ComparableClass) || (sym derivesFrom defn.BoxedNumberClass) || (sym derivesFrom defn.BoxedCharClass) || (sym derivesFrom defn.BoxedBooleanClass) - } !areSameFinals && isMaybeBoxed(l.tpe.typeSymbol) && isMaybeBoxed(r.tpe.typeSymbol) - } - def isNull(t: Tree): Boolean = t match { + def isNull(t: Tree): Boolean = t match case Literal(Constant(null)) => true case _ => false - } def isNonNullExpr(t: Tree): Boolean = t.isInstanceOf[Literal] || ((t.symbol ne null) && t.symbol.is(Module)) - if (mustUseAnyComparator) { - val equalsMethod: Symbol = { - if (l.tpe <:< defn.BoxedNumberClass.info) { + if (mustUseAnyComparator) + val equalsMethod: Symbol = + if (l.tpe <:< defn.BoxedNumberClass.info) if (r.tpe <:< defn.BoxedNumberClass.info) defn.BoxesRunTimeModule.requiredMethod(nme.equalsNumNum) else if (r.tpe <:< defn.BoxedCharClass.info) NoSymbol // ctx.requiredMethod(BoxesRunTimeTypeRef, nme.equalsNumChar) // this method is private else defn.BoxesRunTimeModule.requiredMethod(nme.equalsNumObject) - } else defn.BoxesRunTimeModule_externalEquals - } + else defn.BoxesRunTimeModule_externalEquals genLoad(l, ObjectRef) stackHeight += 1 @@ -1684,17 +1554,16 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { stackHeight -= 1 genCallMethod(equalsMethod, InvokeStyle.Static) genCZJUMP(success, failure, Primitives.NE, BOOL, targetIfNoJump) - } - 
else { - if (isNull(l)) { + else + if (isNull(l)) // null == expr -> expr eq null genLoad(r, ObjectRef) genCZJUMP(success, failure, Primitives.EQ, ObjectRef, targetIfNoJump) - } else if (isNull(r)) { + else if (isNull(r)) // expr == null -> expr eq null genLoad(l, ObjectRef) genCZJUMP(success, failure, Primitives.EQ, ObjectRef, targetIfNoJump) - } else if (isNonNullExpr(l)) { + else if (isNonNullExpr(l)) // SI-7852 Avoid null check if L is statically non-null. genLoad(l, ObjectRef) stackHeight += 1 @@ -1702,7 +1571,7 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { stackHeight -= 1 genCallMethod(defn.Any_equals, InvokeStyle.Virtual) genCZJUMP(success, failure, Primitives.NE, BOOL, targetIfNoJump) - } else { + else // l == r -> if (l eq null) r eq null else l.equals(r) val eqEqTempLocal = locals.makeLocal(ObjectRef, nme.EQEQ_LOCAL_VAR.mangledString, defn.ObjectType, r.span) val lNull = new asm.Label @@ -1725,15 +1594,12 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { locals.load(eqEqTempLocal) genCallMethod(defn.Any_equals, InvokeStyle.Virtual) genCZJUMP(success, failure, Primitives.NE, BOOL, targetIfNoJump) - } - } - } def genSynchronized(tree: Apply, expectedType: BType): BType def genLoadTry(tree: Try): BType - def genInvokeDynamicLambda(ctor: Symbol, lambdaTarget: Symbol, environmentSize: Int, functionalInterface: Symbol): BType = { + def genInvokeDynamicLambda(ctor: Symbol, lambdaTarget: Symbol, environmentSize: Int, functionalInterface: Symbol): BType = import java.lang.invoke.LambdaMetafactory.{FLAG_BRIDGES, FLAG_SERIALIZABLE} report.debuglog(s"Using invokedynamic rather than `new ${ctor.owner}`") @@ -1767,24 +1633,21 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { // TODO specialization val instantiatedMethodType = new MethodBType(lambdaParamTypes.map(p => toTypeKind(p)), toTypeKind(lambdaTarget.info.resultType)).toASMType - val samMethod = atPhase(erasurePhase) { + val samMethod = atPhase(erasurePhase): val samMethods = 
toDenot(functionalInterface).info.possibleSamMethods.toList - samMethods match { + samMethods match case x :: Nil => x.symbol case Nil => abort(s"${functionalInterface.show} is not a functional interface. It doesn't have abstract methods") case xs => abort(s"${functionalInterface.show} is not a functional interface. " + s"It has the following abstract methods: ${xs.map(_.name).mkString(", ")}") - } - } val methodName = samMethod.javaSimpleName val samMethodType = asmMethodType(samMethod).toASMType // scala/bug#10334: make sure that a lambda object for `T => U` has a method `apply(T)U`, not only the `(Object)Object` // version. Using the lambda a structural type `{def apply(t: T): U}` causes a reflective lookup for this method. val needsGenericBridge = samMethodType != instantiatedMethodType - val bridgeMethods = atPhase(erasurePhase){ + val bridgeMethods = atPhase(erasurePhase): samMethod.allOverriddenSymbols.toList - } val overriddenMethodTypes = bridgeMethods.map(b => asmMethodType(b).toASMType) // any methods which `samMethod` overrides need bridges made for them @@ -1817,8 +1680,6 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { bc.jmethod.visitInvokeDynamicInsn(methodName, desc, metafactory, bsmArgs: _*) generatedType - } - } /** Does this symbol actually correspond to an interface that will be emitted? 
* In the backend, this should be preferred over `isInterface` because it @@ -1830,4 +1691,3 @@ trait BCodeBodyBuilder extends BCodeSkelBuilder { sym.is(JavaDefined) && (toDenot(sym).isAnnotation || sym.is(ModuleClass) && (sym.companionClass.is(PureInterface)) || sym.companionClass.is(Trait)) -} diff --git a/compiler/src/dotty/tools/backend/jvm/BCodeHelpers.scala b/compiler/src/dotty/tools/backend/jvm/BCodeHelpers.scala index c36c8c546635..d73508314593 100644 --- a/compiler/src/dotty/tools/backend/jvm/BCodeHelpers.scala +++ b/compiler/src/dotty/tools/backend/jvm/BCodeHelpers.scala @@ -42,7 +42,7 @@ import dotty.tools.backend.jvm.DottyBackendInterface.symExtensions * @version 1.0 * */ -trait BCodeHelpers extends BCodeIdiomatic { +trait BCodeHelpers extends BCodeIdiomatic: // for some reason singleton types aren't allowed in constructor calls. will need several casts in code to enforce //import global._ import bTypes._ @@ -65,26 +65,22 @@ trait BCodeHelpers extends BCodeIdiomatic { val bCodeAsmCommon: BCodeAsmCommon[int.type] = new BCodeAsmCommon(int) - final def traitSuperAccessorName(sym: Symbol): String = { + final def traitSuperAccessorName(sym: Symbol): String = val nameString = sym.javaSimpleName.toString if (sym.name == nme.TRAIT_CONSTRUCTOR) nameString else nameString + "$" - } /* * can-multi-thread */ - def createJAttribute(name: String, b: Array[Byte], offset: Int, len: Int): asm.Attribute = { - new asm.Attribute(name) { + def createJAttribute(name: String, b: Array[Byte], offset: Int, len: Int): asm.Attribute = + new asm.Attribute(name): override def write(classWriter: ClassWriter, code: Array[Byte], - codeLength: Int, maxStack: Int, maxLocals: Int): asm.ByteVector = { + codeLength: Int, maxStack: Int, maxLocals: Int): asm.ByteVector = val byteVector = new asm.ByteVector(len) byteVector.putByteArray(b, offset, len) byteVector - } - } - } /* * Custom attribute (JVMS 4.7.1) "ScalaSig" used as marker only @@ -98,35 +94,32 @@ trait BCodeHelpers extends 
BCodeIdiomatic { * while the "Signature" attribute can be associated to classes, methods, and fields.) * */ - trait BCPickles { + trait BCPickles: import dotty.tools.dotc.core.unpickleScala2.{ PickleFormat, PickleBuffer } - val versionPickle = { + val versionPickle = val vp = new PickleBuffer(new Array[Byte](16), -1, 0) assert(vp.writeIndex == 0, vp) vp writeNat PickleFormat.MajorVersion vp writeNat PickleFormat.MinorVersion vp writeNat 0 vp - } /* * can-multi-thread */ - def pickleMarkerLocal = { + def pickleMarkerLocal = createJAttribute(ScalaSignatureATTRName, versionPickle.bytes, 0, versionPickle.writeIndex) - } /* * can-multi-thread */ - def pickleMarkerForeign = { + def pickleMarkerForeign = createJAttribute(ScalaATTRName, new Array[Byte](0), 0, 0) - } - } // end of trait BCPickles + // end of trait BCPickles - trait BCInnerClassGen { + trait BCInnerClassGen: def debugLevel = 3 // 0 -> no debug info; 1-> filename; 2-> lines; 3-> varnames @@ -137,24 +130,21 @@ trait BCodeHelpers extends BCodeIdiomatic { /** * The class internal name for a given class symbol. */ - final def internalName(sym: Symbol): String = { + final def internalName(sym: Symbol): String = // For each java class, the scala compiler creates a class and a module (thus a module class). // If the `sym` is a java module class, we use the java class instead. This ensures that the // ClassBType is created from the main class (instead of the module class). // The two symbols have the same name, so the resulting internalName is the same. 
val classSym = if (sym.is(JavaDefined) && sym.is(ModuleClass)) sym.linkedClass else sym getClassBType(classSym).internalName - } - private def assertClassNotArray(sym: Symbol): Unit = { + private def assertClassNotArray(sym: Symbol): Unit = assert(sym.isClass, sym) assert(sym != defn.ArrayClass || compilingArray, sym) - } - private def assertClassNotArrayNotPrimitive(sym: Symbol): Unit = { + private def assertClassNotArrayNotPrimitive(sym: Symbol): Unit = assertClassNotArray(sym) assert(!primitiveTypeMap.contains(sym) || isCompilingPrimitive, sym) - } /** * The ClassBType for a class symbol. @@ -169,24 +159,22 @@ trait BCodeHelpers extends BCodeIdiomatic { * the class descriptor of the receiver (the implementation class) is obtained by creating the * ClassBType. */ - final def getClassBType(sym: Symbol): ClassBType = { + final def getClassBType(sym: Symbol): ClassBType = assertClassNotArrayNotPrimitive(sym) if (sym == defn.NothingClass) srNothingRef else if (sym == defn.NullClass) srNullRef else classBTypeFromSymbol(sym) - } /* * must-single-thread */ - final def asmMethodType(msym: Symbol): MethodBType = { + final def asmMethodType(msym: Symbol): MethodBType = assert(msym.is(Method), s"not a method-symbol: $msym") val resT: BType = if (msym.isClassConstructor || msym.isConstructor) UNIT else toTypeKind(msym.info.resultType) MethodBType(msym.info.firstParamTypes map toTypeKind, resT) - } /** * The jvm descriptor of a type. 
@@ -200,42 +188,39 @@ trait BCodeHelpers extends BCodeIdiomatic { final def toTypeKind(tp: Type): BType = typeToTypeKind(tp)(BCodeHelpers.this)(this) - } // end of trait BCInnerClassGen + // end of trait BCInnerClassGen - trait BCAnnotGen extends BCInnerClassGen { + trait BCAnnotGen extends BCInnerClassGen: /* * must-single-thread */ def emitAnnotations(cw: asm.ClassVisitor, annotations: List[Annotation]): Unit = - for(annot <- annotations; if shouldEmitAnnotation(annot)) { + for(annot <- annotations; if shouldEmitAnnotation(annot)) val typ = annot.tree.tpe val assocs = assocsFromApply(annot.tree) val av = cw.visitAnnotation(typeDescriptor(typ), isRuntimeVisible(annot)) emitAssocs(av, assocs, BCodeHelpers.this)(this) - } /* * must-single-thread */ def emitAnnotations(mw: asm.MethodVisitor, annotations: List[Annotation]): Unit = - for(annot <- annotations; if shouldEmitAnnotation(annot)) { + for(annot <- annotations; if shouldEmitAnnotation(annot)) val typ = annot.tree.tpe val assocs = assocsFromApply(annot.tree) val av = mw.visitAnnotation(typeDescriptor(typ), isRuntimeVisible(annot)) emitAssocs(av, assocs, BCodeHelpers.this)(this) - } /* * must-single-thread */ def emitAnnotations(fw: asm.FieldVisitor, annotations: List[Annotation]): Unit = - for(annot <- annotations; if shouldEmitAnnotation(annot)) { + for(annot <- annotations; if shouldEmitAnnotation(annot)) val typ = annot.tree.tpe val assocs = assocsFromApply(annot.tree) val av = fw.visitAnnotation(typeDescriptor(typ), isRuntimeVisible(annot)) emitAssocs(av, assocs, BCodeHelpers.this)(this) - } /* * must-single-thread @@ -253,42 +238,38 @@ trait BCodeHelpers extends BCodeIdiomatic { val annotationss = pannotss map (_ filter shouldEmitAnnotation) if (annotationss forall (_.isEmpty)) return for ((annots, idx) <- annotationss.zipWithIndex; - annot <- annots) { + annot <- annots) val typ = annot.tree.tpe val assocs = assocsFromApply(annot.tree) val pannVisitor: asm.AnnotationVisitor = 
jmethod.visitParameterAnnotation(idx, typeDescriptor(typ.asInstanceOf[Type]), isRuntimeVisible(annot)) emitAssocs(pannVisitor, assocs, BCodeHelpers.this)(this) - } - private def shouldEmitAnnotation(annot: Annotation): Boolean = { + private def shouldEmitAnnotation(annot: Annotation): Boolean = annot.symbol.is(JavaDefined) && retentionPolicyOf(annot) != AnnotationRetentionSourceAttr - } private def emitAssocs(av: asm.AnnotationVisitor, assocs: List[(Name, Object)], bcodeStore: BCodeHelpers) - (innerClasesStore: bcodeStore.BCInnerClassGen) = { + (innerClasesStore: bcodeStore.BCInnerClassGen) = for ((name, value) <- assocs) emitArgument(av, name.mangledString, value.asInstanceOf[Tree], bcodeStore)(innerClasesStore) av.visitEnd() - } private def emitArgument(av: AnnotationVisitor, name: String, - arg: Tree, bcodeStore: BCodeHelpers)(innerClasesStore: bcodeStore.BCInnerClassGen): Unit = { + arg: Tree, bcodeStore: BCodeHelpers)(innerClasesStore: bcodeStore.BCInnerClassGen): Unit = val narg = normalizeArgument(arg) // Transformation phases are not run on annotation trees, so we need to run // `constToLiteral` at this point. 
val t = atPhase(erasurePhase)(constToLiteral(narg)) - t match { + t match case Literal(const @ Constant(_)) => - const.tag match { + const.tag match case BooleanTag | ByteTag | ShortTag | CharTag | IntTag | LongTag | FloatTag | DoubleTag => av.visit(name, const.value) case StringTag => assert(const.value != null, const) // TODO this invariant isn't documented in `case class Constant` av.visit(name, const.stringValue) // `stringValue` special-cases null, but that execution path isn't exercised for a const with StringTag case ClazzTag => av.visit(name, typeToTypeKind(TypeErasure.erasure(const.typeValue))(bcodeStore)(innerClasesStore).toASMType) - } case Ident(nme.WILDCARD) => // An underscore argument indicates that we want to use the default value for this parameter, so do not emit anything case t: tpd.RefTree if t.symbol.owner.linkedClass.isAllOf(JavaEnumTrait) => @@ -304,20 +285,18 @@ trait BCodeHelpers extends BCodeIdiomatic { toDenot(fun.symbol).owner == defn.ArrayClass.linkedClass && fun.symbol.name == nme.apply => val arrAnnotV: AnnotationVisitor = av.visitArray(name) - var actualArgs = if (fun.tpe.isImplicitMethod) { + var actualArgs = if (fun.tpe.isImplicitMethod) // generic array method, need to get implicit argument out of the way fun.asInstanceOf[Apply].args - } else args + else args val flatArgs = actualArgs.flatMap { arg => - normalizeArgument(arg) match { + normalizeArgument(arg) match case t: tpd.SeqLiteral => t.elems case e => List(e) - } } - for(arg <- flatArgs) { + for(arg <- flatArgs) emitArgument(arrAnnotV, null, arg, bcodeStore)(innerClasesStore) - } arrAnnotV.visitEnd() /* case sb @ ScalaSigBytes(bytes) => @@ -340,45 +319,37 @@ trait BCodeHelpers extends BCodeIdiomatic { case t => report.error(em"Annotation argument is not a constant", t.sourcePos) - } - } - private def normalizeArgument(arg: Tree): Tree = arg match { + private def normalizeArgument(arg: Tree): Tree = arg match case Trees.NamedArg(_, arg1) => normalizeArgument(arg1) case 
Trees.Typed(arg1, _) => normalizeArgument(arg1) case _ => arg - } private def isRuntimeVisible(annot: Annotation): Boolean = if (toDenot(annot.tree.tpe.typeSymbol).hasAnnotation(AnnotationRetentionAttr)) retentionPolicyOf(annot) == AnnotationRetentionRuntimeAttr - else { + else // SI-8926: if the annotation class symbol doesn't have a @RetentionPolicy annotation, the // annotation is emitted with visibility `RUNTIME` // dotty bug: #389 true - } private def retentionPolicyOf(annot: Annotation): Symbol = annot.tree.tpe.typeSymbol.getAnnotation(AnnotationRetentionAttr). flatMap(_.argument(0).map(_.tpe.termSymbol)).getOrElse(AnnotationRetentionClassAttr) - private def assocsFromApply(tree: Tree): List[(Name, Tree)] = { - tree match { + private def assocsFromApply(tree: Tree): List[(Name, Tree)] = + tree match case Block(_, expr) => assocsFromApply(expr) case Apply(fun, args) => - fun.tpe.widen match { + fun.tpe.widen match case MethodType(names) => - (names zip args).filter { + (names zip args).filter: case (_, t: tpd.Ident) if (t.tpe.normalizedPrefix eq NoPrefix) => false case _ => true - } - } - } - } - } // end of trait BCAnnotGen + // end of trait BCAnnotGen - trait BCJGenSigGen { + trait BCJGenSigGen: import int.given def getCurrentCUnit(): CompilationUnit @@ -392,24 +363,22 @@ trait BCodeHelpers extends BCodeIdiomatic { * Machine Specification, §4.3.4, or `null` if `sym` doesn't need a generic signature. 
* @see https://docs.oracle.com/javase/specs/jvms/se7/html/jvms-4.html#jvms-4.3.4 */ - def getGenericSignature(sym: Symbol, owner: Symbol): String = { - atPhase(erasurePhase) { + def getGenericSignature(sym: Symbol, owner: Symbol): String = + atPhase(erasurePhase): val memberTpe = if (sym.is(Method)) sym.denot.info else owner.denot.thisType.memberInfo(sym) getGenericSignatureHelper(sym, owner, memberTpe).orNull - } - } - } // end of trait BCJGenSigGen + // end of trait BCJGenSigGen - trait BCForwardersGen extends BCAnnotGen with BCJGenSigGen { + trait BCForwardersGen extends BCAnnotGen with BCJGenSigGen: /* Add a forwarder for method m. Used only from addForwarders(). * * must-single-thread */ - private def addForwarder(jclass: asm.ClassVisitor, module: Symbol, m: Symbol, isSynthetic: Boolean): Unit = { + private def addForwarder(jclass: asm.ClassVisitor, module: Symbol, m: Symbol, isSynthetic: Boolean): Unit = val moduleName = internalName(module) val methodInfo = module.thisType.memberInfo(m) val paramJavaTypes: List[BType] = methodInfo.firstParamTypes map toTypeKind @@ -455,11 +424,10 @@ trait BCodeHelpers extends BCodeIdiomatic { mirrorMethod.visitFieldInsn(asm.Opcodes.GETSTATIC, moduleName, str.MODULE_INSTANCE_FIELD, symDescriptor(module)) var index = 0 - for(jparamType <- paramJavaTypes) { + for(jparamType <- paramJavaTypes) mirrorMethod.visitVarInsn(jparamType.typedOpcode(asm.Opcodes.ILOAD), index) assert(!jparamType.isInstanceOf[MethodBType], jparamType) index += jparamType.size - } mirrorMethod.visitMethodInsn(asm.Opcodes.INVOKEVIRTUAL, moduleName, mirrorMethodName, asmMethodType(m).descriptor, false) mirrorMethod.visitInsn(jReturnType.typedOpcode(asm.Opcodes.IRETURN)) @@ -467,7 +435,6 @@ trait BCodeHelpers extends BCodeIdiomatic { mirrorMethod.visitMaxs(0, 0) // just to follow protocol, dummy arguments mirrorMethod.visitEnd() - } /* Add forwarders for all methods defined in `module` that don't conflict * with methods in the companion class of `module`. 
A conflict arises when @@ -476,17 +443,16 @@ trait BCodeHelpers extends BCodeIdiomatic { * * must-single-thread */ - def addForwarders(jclass: asm.ClassVisitor, jclassName: String, moduleClass: Symbol): Unit = { + def addForwarders(jclass: asm.ClassVisitor, jclassName: String, moduleClass: Symbol): Unit = assert(moduleClass.is(ModuleClass), moduleClass) report.debuglog(s"Dumping mirror class for object: $moduleClass") val linkedClass = moduleClass.companionClass - lazy val conflictingNames: Set[Name] = { + lazy val conflictingNames: Set[Name] = (linkedClass.info.allMembers.collect { case d if d.name.isTermName => d.name }).toSet - } report.debuglog(s"Potentially conflicting names for forwarders: $conflictingNames") - for (m0 <- sortedMembersBasedOnFlags(moduleClass.info, required = Method, excluded = ExcludedForwarder)) { + for (m0 <- sortedMembersBasedOnFlags(moduleClass.info, required = Method, excluded = ExcludedForwarder)) val m = if (m0.is(Bridge)) m0.nextOverriddenSymbol else m0 if (m == NoSymbol) report.log(s"$m0 is a bridge method that overrides nothing, something went wrong in a previous phase.") @@ -496,7 +462,7 @@ trait BCodeHelpers extends BCodeIdiomatic { report.log(s"No forwarder for $m due to conflict with ${linkedClass.info.member(m.name)}") else if (m.accessBoundary(defn.RootClass) ne defn.RootClass) report.log(s"No forwarder for non-public member $m") - else { + else report.log(s"Adding static forwarder for '$m' from $jclassName to '$moduleClass'") // It would be simpler to not generate forwarders for these methods, // but that wouldn't be binary-compatible with Scala 3.0.0, so instead @@ -508,14 +474,11 @@ trait BCodeHelpers extends BCodeIdiomatic { // non-bridge overload. m0.is(Bridge) && m0.initial.validFor.firstPhaseId == erasurePhase.next.id addForwarder(jclass, moduleClass, m, isSynthetic) - } - } - } /** The members of this type that have all of `required` flags but none of `excluded` flags set. 
* The members are sorted by name and signature to guarantee a stable ordering. */ - private def sortedMembersBasedOnFlags(tp: Type, required: Flag, excluded: FlagSet): List[Symbol] = { + private def sortedMembersBasedOnFlags(tp: Type, required: Flag, excluded: FlagSet): List[Symbol] = // The output of `memberNames` is a Set, sort it to guarantee a stable ordering. val names = tp.memberNames(takeAllFilter).toSeq.sorted val buffer = mutable.ListBuffer[Symbol]() @@ -524,7 +487,6 @@ trait BCodeHelpers extends BCodeIdiomatic { .alternatives.sortBy(_.signature)(Signature.lexicographicOrdering).map(_.symbol) } buffer.toList - } /* * Quoting from JVMS 4.7.5 The Exceptions Attribute @@ -536,13 +498,12 @@ trait BCodeHelpers extends BCodeIdiomatic { * * must-single-thread */ - def getExceptions(excs: List[Annotation]): List[String] = { + def getExceptions(excs: List[Annotation]): List[String] = for (case ThrownException(exc) <- excs.distinct) yield internalName(TypeErasure.erasure(exc).classSymbol) - } - } // end of trait BCForwardersGen + // end of trait BCForwardersGen - trait BCClassGen extends BCInnerClassGen { + trait BCClassGen extends BCInnerClassGen: // Used as threshold above which a tableswitch bytecode instruction is preferred over a lookupswitch. // There's a space tradeoff between these multi-branch instructions (details in the JVM spec). 
@@ -554,7 +515,7 @@ trait BCodeHelpers extends BCodeIdiomatic { * * can-multi-thread */ - def addSerialVUID(id: Long, jclass: asm.ClassVisitor): Unit = { + def addSerialVUID(id: Long, jclass: asm.ClassVisitor): Unit = // add static serialVersionUID field if `clasz` annotated with `@SerialVersionUID(uid: Long)` jclass.visitField( GenBCodeOps.PrivateStaticFinal, @@ -563,8 +524,7 @@ trait BCodeHelpers extends BCodeIdiomatic { null, // no java-generic-signature java.lang.Long.valueOf(id) ).visitEnd() - } - } // end of trait BCClassGen + // end of trait BCClassGen /* functionality for building plain and mirror classes */ abstract class JCommonBuilder @@ -574,7 +534,7 @@ trait BCodeHelpers extends BCodeIdiomatic { with BCPickles { } /* builder of mirror classes */ - class JMirrorBuilder extends JCommonBuilder { + class JMirrorBuilder extends JCommonBuilder: private var cunit: CompilationUnit = _ def getCurrentCUnit(): CompilationUnit = cunit; @@ -587,7 +547,7 @@ trait BCodeHelpers extends BCodeIdiomatic { * * must-single-thread */ - def genMirrorClass(moduleClass: Symbol, cunit: CompilationUnit): asm.tree.ClassNode = { + def genMirrorClass(moduleClass: Symbol, cunit: CompilationUnit): asm.tree.ClassNode = assert(moduleClass.is(ModuleClass)) assert(moduleClass.companionClass == NoSymbol, moduleClass) this.cunit = cunit @@ -605,10 +565,9 @@ trait BCodeHelpers extends BCodeIdiomatic { EMPTY_STRING_ARRAY ) - if (emitSource) { + if (emitSource) mirrorClass.visitSource("" + cunit.source.file.name, null /* SourceDebugExtension */) - } val ssa = None // getAnnotPickle(mirrorName, if (moduleClass.is(Module)) moduleClass.companionClass else moduleClass.companionModule) mirrorClass.visitAttribute(if (ssa.isDefined) pickleMarkerLocal else pickleMarkerForeign) @@ -620,11 +579,10 @@ trait BCodeHelpers extends BCodeIdiomatic { moduleClass.name // this side-effect is necessary, really. 
mirrorClass - } - } // end of class JMirrorBuilder + // end of class JMirrorBuilder - trait JAndroidBuilder { + trait JAndroidBuilder: self: BCInnerClassGen => /* From the reference documentation of the Android SDK: @@ -647,7 +605,7 @@ trait BCodeHelpers extends BCodeIdiomatic { /* * must-single-thread */ - def legacyAddCreatorCode(clinit: asm.MethodVisitor, cnode: asm.tree.ClassNode, thisName: String): Unit = { + def legacyAddCreatorCode(clinit: asm.MethodVisitor, cnode: asm.tree.ClassNode, thisName: String): Unit = val androidCreatorType = getClassBType(AndroidCreatorClass) val tdesc_creator = androidCreatorType.descriptor @@ -686,9 +644,8 @@ trait BCodeHelpers extends BCodeIdiomatic { "CREATOR", tdesc_creator ) - } - } // end of trait JAndroidBuilder + // end of trait JAndroidBuilder /** * This method returns the BType for a type reference, for example a parameter type. @@ -699,7 +656,7 @@ trait BCodeHelpers extends BCodeIdiomatic { * See also comment on getClassBTypeAndRegisterInnerClass, which is invoked for implementation * classes. */ - private def typeToTypeKind(tp: Type)(ct: BCodeHelpers)(storage: ct.BCInnerClassGen): ct.bTypes.BType = { + private def typeToTypeKind(tp: Type)(ct: BCodeHelpers)(storage: ct.BCInnerClassGen): ct.bTypes.BType = import ct.bTypes._ val defn = ctx.definitions import coreBTypes._ @@ -708,30 +665,27 @@ trait BCodeHelpers extends BCodeIdiomatic { * Primitive types are represented as TypeRefs to the class symbol of, for example, scala.Int. * The `primitiveTypeMap` maps those class symbols to the corresponding PrimitiveBType. */ - def primitiveOrClassToBType(sym: Symbol): BType = { + def primitiveOrClassToBType(sym: Symbol): BType = assert(sym.isClass, sym) assert(sym != defn.ArrayClass || compilingArray, sym) primitiveTypeMap.getOrElse(sym, storage.getClassBType(sym)).asInstanceOf[BType] - } /** * When compiling Array.scala, the type parameter T is not erased and shows up in method * signatures, e.g. `def apply(i: Int): T`. 
A TyperRef to T is replaced by ObjectReference. */ - def nonClassTypeRefToBType(sym: Symbol): ClassBType = { + def nonClassTypeRefToBType(sym: Symbol): ClassBType = assert(sym.isType && compilingArray, sym) ObjectRef.asInstanceOf[ct.bTypes.ClassBType] - } - tp.widenDealias match { + tp.widenDealias match case JavaArrayType(el) =>ArrayBType(typeToTypeKind(el)(ct)(storage)) // Array type such as Array[Int] (kept by erasure) case t: TypeRef => - t.info match { + t.info match case _ => if (!t.symbol.isClass) nonClassTypeRefToBType(t.symbol) // See comment on nonClassTypeRefToBType else primitiveOrClassToBType(t.symbol) // Common reference to a type such as scala.Int or java.lang.String - } case Types.ClassInfo(_, sym, _, _, _) => primitiveOrClassToBType(sym) // We get here, for example, for genLoadModule, which invokes toTypeKind(moduleClassSymbol.info) /* AnnotatedType should (probably) be eliminated by erasure. However we know it happens for @@ -752,41 +706,34 @@ trait BCodeHelpers extends BCodeIdiomatic { s"an unexpected type representation reached the compiler backend while compiling ${ctx.compilationUnit}: $tp. 
" + "If possible, please file a bug on https://github.com/lampepfl/dotty/issues") - tp match { + tp match case tp: ThisType if tp.cls == defn.ArrayClass => ObjectRef.asInstanceOf[ct.bTypes.ClassBType] // was introduced in 9b17332f11 to fix SI-999, but this code is not reached in its test, or any other test case tp: ThisType => storage.getClassBType(tp.cls) // case t: SingletonType => primitiveOrClassToBType(t.classSymbol) case t: SingletonType => typeToTypeKind(t.underlying)(ct)(storage) case t: RefinedType => typeToTypeKind(t.parent)(ct)(storage) //parents.map(_.toTypeKind(ct)(storage).asClassBType).reduceLeft((a, b) => a.jvmWiseLUB(b)) - } - } - } - private def getGenericSignatureHelper(sym: Symbol, owner: Symbol, memberTpe: Type)(using Context): Option[String] = { - if (needsGenericSignature(sym)) { + private def getGenericSignatureHelper(sym: Symbol, owner: Symbol, memberTpe: Type)(using Context): Option[String] = + if (needsGenericSignature(sym)) val erasedTypeSym = TypeErasure.fullErasure(sym.denot.info).typeSymbol - if (erasedTypeSym.isPrimitiveValueClass) { + if (erasedTypeSym.isPrimitiveValueClass) // Suppress signatures for symbols whose types erase in the end to primitive // value types. This is needed to fix #7416. 
None - } else { + else val jsOpt = GenericSignatures.javaSig(sym, memberTpe) - if (ctx.settings.XverifySignatures.value) { + if (ctx.settings.XverifySignatures.value) jsOpt.foreach(verifySignature(sym, _)) - } jsOpt - } - } else { + else None - } - } - private def verifySignature(sym: Symbol, sig: String)(using Context): Unit = { + private def verifySignature(sym: Symbol, sig: String)(using Context): Unit = import scala.tools.asm.util.CheckClassAdapter - def wrap(body: => Unit): Unit = { + def wrap(body: => Unit): Unit = try body - catch { + catch case ex: Throwable => report.error( em"""|compiler bug: created invalid generic signature for $sym in ${sym.denot.owner.showFullName} @@ -794,21 +741,14 @@ trait BCodeHelpers extends BCodeIdiomatic { |if this is reproducible, please report bug at https://github.com/lampepfl/dotty/issues """, sym.sourcePos) throw ex - } - } - wrap { - if (sym.is(Method)) { + wrap: + if (sym.is(Method)) CheckClassAdapter.checkMethodSignature(sig) - } - else if (sym.isTerm) { + else if (sym.isTerm) CheckClassAdapter.checkFieldSignature(sig) - } - else { + else CheckClassAdapter.checkClassSignature(sig) - } - } - } // @M don't generate java generics sigs for (members of) implementation // classes, as they are monomorphic (TODO: ok?) @@ -825,7 +765,7 @@ trait BCodeHelpers extends BCodeIdiomatic { || sym.is(Bridge) ) - private def getStaticForwarderGenericSignature(sym: Symbol, moduleClass: Symbol): String = { + private def getStaticForwarderGenericSignature(sym: Symbol, moduleClass: Symbol): String = // scala/bug#3452 Static forwarder generation uses the same erased signature as the method if forwards to. // By rights, it should use the signature as-seen-from the module class, and add suitable // primitive and value-class boxing/unboxing. 
@@ -837,20 +777,17 @@ trait BCodeHelpers extends BCodeIdiomatic { if (erasedMemberType =:= sym.denot.info) getGenericSignatureHelper(sym, moduleClass, memberTpe).orNull else null - } - def abort(msg: String): Nothing = { + def abort(msg: String): Nothing = report.error(msg) throw new RuntimeException(msg) - } private def compilingArray(using Context) = ctx.compilationUnit.source.file.name == "Array.scala" -} -object BCodeHelpers { +object BCodeHelpers: - class InvokeStyle(val style: Int) extends AnyVal { + class InvokeStyle(val style: Int) extends AnyVal: import InvokeStyle._ def isVirtual: Boolean = this == Virtual def isStatic : Boolean = this == Static @@ -858,14 +795,12 @@ object BCodeHelpers { def isSuper : Boolean = this == Super def hasInstance = this != Static - } - object InvokeStyle { + object InvokeStyle: val Virtual = new InvokeStyle(0) // InvokeVirtual or InvokeInterface val Static = new InvokeStyle(1) // InvokeStatic val Special = new InvokeStyle(2) // InvokeSpecial (private methods, constructors) val Super = new InvokeStyle(3) // InvokeSpecial (super calls) - } /** An attachment on Apply nodes indicating that it should be compiled with * `invokespecial` instead of `invokevirtual`. 
This is used for static @@ -874,4 +809,3 @@ object BCodeHelpers { */ val UseInvokeSpecial = new dotc.util.Property.Key[Unit] -} diff --git a/compiler/src/dotty/tools/backend/jvm/BCodeIdiomatic.scala b/compiler/src/dotty/tools/backend/jvm/BCodeIdiomatic.scala index 42f8ef7f4ef6..3b5f10d00800 100644 --- a/compiler/src/dotty/tools/backend/jvm/BCodeIdiomatic.scala +++ b/compiler/src/dotty/tools/backend/jvm/BCodeIdiomatic.scala @@ -17,7 +17,7 @@ import dotty.tools.dotc.report * @version 1.0 * */ -trait BCodeIdiomatic { +trait BCodeIdiomatic: val int: DottyBackendInterface val bTypes: BTypesFromSymbols[int.type] @@ -37,61 +37,54 @@ trait BCodeIdiomatic { val EMPTY_BTYPE_ARRAY = Array.empty[BType] /* can-multi-thread */ - final def mkArrayB(xs: List[BType]): Array[BType] = { + final def mkArrayB(xs: List[BType]): Array[BType] = if (xs.isEmpty) { return EMPTY_BTYPE_ARRAY } val a = new Array[BType](xs.size); xs.copyToArray(a); a - } /* can-multi-thread */ - final def mkArrayS(xs: List[String]): Array[String] = { + final def mkArrayS(xs: List[String]): Array[String] = if (xs.isEmpty) { return EMPTY_STRING_ARRAY } val a = new Array[String](xs.size); xs.copyToArray(a); a - } /* can-multi-thread */ - final def mkArrayL(xs: List[asm.Label]): Array[asm.Label] = { + final def mkArrayL(xs: List[asm.Label]): Array[asm.Label] = if (xs.isEmpty) { return EMPTY_LABEL_ARRAY } val a = new Array[asm.Label](xs.size); xs.copyToArray(a); a - } /* * can-multi-thread */ - final def mkArrayReverse(xs: List[String]): Array[String] = { + final def mkArrayReverse(xs: List[String]): Array[String] = val len = xs.size if (len == 0) { return EMPTY_STRING_ARRAY } val a = new Array[String](len) var i = len - 1 var rest = xs - while (!rest.isEmpty) { + while (!rest.isEmpty) a(i) = rest.head rest = rest.tail i -= 1 - } a - } /* * can-multi-thread */ - final def mkArrayReverse(xs: List[Int]): Array[Int] = { + final def mkArrayReverse(xs: List[Int]): Array[Int] = val len = xs.size if (len == 0) { return 
EMPTY_INT_ARRAY } val a = new Array[Int](len) var i = len - 1 var rest = xs - while (!rest.isEmpty) { + while (!rest.isEmpty) a(i) = rest.head rest = rest.tail i -= 1 - } a - } /* Just a namespace for utilities that encapsulate MethodVisitor idioms. * In the ASM world, org.objectweb.asm.commons.InstructionAdapter plays a similar role, * but the methods here allow choosing when to transition from ICode to ASM types * (including not at all, e.g. for performance). */ - abstract class JCodeMethodN { + abstract class JCodeMethodN: def jmethod: asm.tree.MethodNode @@ -102,11 +95,11 @@ trait BCodeIdiomatic { /* * can-multi-thread */ - final def genPrimitiveArithmetic(op: ArithmeticOp, kind: BType): Unit = { + final def genPrimitiveArithmetic(op: ArithmeticOp, kind: BType): Unit = import Primitives.{ ADD, SUB, MUL, DIV, REM, NOT } - op match { + op match case ADD => add(kind) case SUB => sub(kind) @@ -115,30 +108,28 @@ trait BCodeIdiomatic { case REM => rem(kind) case NOT => - if (kind.isIntSizedType) { + if (kind.isIntSizedType) emit(Opcodes.ICONST_M1) emit(Opcodes.IXOR) - } else if (kind == LONG) { + else if (kind == LONG) jmethod.visitLdcInsn(java.lang.Long.valueOf(-1)) jmethod.visitInsn(Opcodes.LXOR) - } else { + else abort(s"Impossible to negate an $kind") - } case _ => abort(s"Unknown arithmetic primitive $op") - } - } // end of method genPrimitiveArithmetic() + // end of method genPrimitiveArithmetic() /* * can-multi-thread */ - final def genPrimitiveLogical(op: /* LogicalOp */ Int, kind: BType): Unit = { + final def genPrimitiveLogical(op: /* LogicalOp */ Int, kind: BType): Unit = import ScalaPrimitivesOps.{ AND, OR, XOR } - ((op, kind): @unchecked) match { + ((op, kind): @unchecked) match case (AND, LONG) => emit(Opcodes.LAND) case (AND, INT) => emit(Opcodes.IAND) case (AND, _) => @@ -156,18 +147,17 @@ trait BCodeIdiomatic { case (XOR, _) => emit(Opcodes.IXOR) if (kind != BOOL) { emitT2T(INT, kind) } - } - } // end of method genPrimitiveLogical() + // end of 
method genPrimitiveLogical() /* * can-multi-thread */ - final def genPrimitiveShift(op: /* ShiftOp */ Int, kind: BType): Unit = { + final def genPrimitiveShift(op: /* ShiftOp */ Int, kind: BType): Unit = import ScalaPrimitivesOps.{ LSL, ASR, LSR } - ((op, kind): @unchecked) match { + ((op, kind): @unchecked) match case (LSL, LONG) => emit(Opcodes.LSHL) case (LSL, INT) => emit(Opcodes.ISHL) case (LSL, _) => @@ -185,15 +175,14 @@ trait BCodeIdiomatic { case (LSR, _) => emit(Opcodes.IUSHR) emitT2T(INT, kind) - } - } // end of method genPrimitiveShift() + // end of method genPrimitiveShift() /* Creates a new `StringBuilder` instance with the requested capacity * * can-multi-thread */ - final def genNewStringBuilder(size: Int): Unit = { + final def genNewStringBuilder(size: Int): Unit = jmethod.visitTypeInsn(Opcodes.NEW, JavaStringBuilderClassName) jmethod.visitInsn(Opcodes.DUP) jmethod.visitLdcInsn(Integer.valueOf(size)) @@ -203,14 +192,13 @@ trait BCodeIdiomatic { "(I)V", itf = false ) - } /* Issue a call to `StringBuilder#append` for the right element type * * can-multi-thread */ - final def genStringBuilderAppend(elemType: BType): Unit = { - val paramType = elemType match { + final def genStringBuilderAppend(elemType: BType): Unit = + val paramType = elemType match case ct: ClassBType if ct.isSubtypeOf(StringRef) => StringRef case ct: ClassBType if ct.isSubtypeOf(jlStringBufferRef) => jlStringBufferRef case ct: ClassBType if ct.isSubtypeOf(jlCharSequenceRef) => jlCharSequenceRef @@ -220,18 +208,15 @@ trait BCodeIdiomatic { // jlStringBuilder does not have overloads for byte and short, but we can just use the int version case BYTE | SHORT => INT case pt: PrimitiveBType => pt - } val bt = MethodBType(List(paramType), jlStringBuilderRef) invokevirtual(JavaStringBuilderClassName, "append", bt.descriptor) - } /* Extract the built `String` from the `StringBuilder` * * can-multi-thread */ - final def genStringBuilderEnd: Unit = { + final def genStringBuilderEnd: Unit = 
invokevirtual(JavaStringBuilderClassName, "toString", genStringBuilderEndDesc) - } // Use ClassBType refs instead of plain string literal to make sure that needed ClassBTypes are initialized and reachable private lazy val genStringBuilderEndDesc = MethodBType(Nil, StringRef).descriptor @@ -244,14 +229,13 @@ trait BCodeIdiomatic { recipe: String, argTypes: Seq[asm.Type], constants: Seq[String] - ): Unit = { + ): Unit = jmethod.visitInvokeDynamicInsn( "makeConcatWithConstants", asm.Type.getMethodDescriptor(StringRef.toASMType, argTypes:_*), coreBTypes.jliStringConcatFactoryMakeConcatWithConstantsHandle, (recipe +: constants):_* ) - } /* * Emits one or more conversion instructions based on the types given as arguments. @@ -261,15 +245,15 @@ trait BCodeIdiomatic { * * can-multi-thread */ - final def emitT2T(from: BType, to: BType): Unit = { + final def emitT2T(from: BType, to: BType): Unit = assert( from.isNonVoidPrimitiveType && to.isNonVoidPrimitiveType, s"Cannot emit primitive conversion from $from to $to" ) - def pickOne(opcs: Array[Int]): Unit = { // TODO index on to.sort - val chosen = (to: @unchecked) match { + def pickOne(opcs: Array[Int]): Unit = // TODO index on to.sort + val chosen = (to: @unchecked) match case BYTE => opcs(0) case SHORT => opcs(1) case CHAR => opcs(2) @@ -277,16 +261,14 @@ trait BCodeIdiomatic { case LONG => opcs(4) case FLOAT => opcs(5) case DOUBLE => opcs(6) - } if (chosen != -1) { emit(chosen) } - } if (from == to) { return } // the only conversion involving BOOL that is allowed is (BOOL -> BOOL) assert(from != BOOL && to != BOOL, s"inconvertible types : $from -> $to") // We're done with BOOL already - from match { + from match // using `asm.Type.SHORT` instead of `BType.SHORT` because otherwise "warning: could not emit switch for @switch annotated match" @@ -297,86 +279,74 @@ trait BCodeIdiomatic { case FLOAT => import asm.Opcodes.{ F2L, F2D, F2I } - to match { + to match case LONG => emit(F2L) case DOUBLE => emit(F2D) case _ => 
emit(F2I); emitT2T(INT, to) - } case LONG => import asm.Opcodes.{ L2F, L2D, L2I } - to match { + to match case FLOAT => emit(L2F) case DOUBLE => emit(L2D) case _ => emit(L2I); emitT2T(INT, to) - } case DOUBLE => import asm.Opcodes.{ D2L, D2F, D2I } - to match { + to match case FLOAT => emit(D2F) case LONG => emit(D2L) case _ => emit(D2I); emitT2T(INT, to) - } - } - } // end of emitT2T() + // end of emitT2T() // can-multi-thread final def boolconst(b: Boolean): Unit = { iconst(if (b) 1 else 0) } // can-multi-thread - final def iconst(cst: Int): Unit = { - if (cst >= -1 && cst <= 5) { + final def iconst(cst: Int): Unit = + if (cst >= -1 && cst <= 5) emit(Opcodes.ICONST_0 + cst) - } else if (cst >= java.lang.Byte.MIN_VALUE && cst <= java.lang.Byte.MAX_VALUE) { + else if (cst >= java.lang.Byte.MIN_VALUE && cst <= java.lang.Byte.MAX_VALUE) jmethod.visitIntInsn(Opcodes.BIPUSH, cst) - } else if (cst >= java.lang.Short.MIN_VALUE && cst <= java.lang.Short.MAX_VALUE) { + else if (cst >= java.lang.Short.MIN_VALUE && cst <= java.lang.Short.MAX_VALUE) jmethod.visitIntInsn(Opcodes.SIPUSH, cst) - } else { + else jmethod.visitLdcInsn(Integer.valueOf(cst)) - } - } // can-multi-thread - final def lconst(cst: Long): Unit = { - if (cst == 0L || cst == 1L) { + final def lconst(cst: Long): Unit = + if (cst == 0L || cst == 1L) emit(Opcodes.LCONST_0 + cst.asInstanceOf[Int]) - } else { + else jmethod.visitLdcInsn(java.lang.Long.valueOf(cst)) - } - } // can-multi-thread - final def fconst(cst: Float): Unit = { + final def fconst(cst: Float): Unit = val bits: Int = java.lang.Float.floatToIntBits(cst) - if (bits == 0L || bits == 0x3f800000 || bits == 0x40000000) { // 0..2 + if (bits == 0L || bits == 0x3f800000 || bits == 0x40000000) // 0..2 emit(Opcodes.FCONST_0 + cst.asInstanceOf[Int]) - } else { + else jmethod.visitLdcInsn(java.lang.Float.valueOf(cst)) - } - } // can-multi-thread - final def dconst(cst: Double): Unit = { + final def dconst(cst: Double): Unit = val bits: Long = 
java.lang.Double.doubleToLongBits(cst) - if (bits == 0L || bits == 0x3ff0000000000000L) { // +0.0d and 1.0d + if (bits == 0L || bits == 0x3ff0000000000000L) // +0.0d and 1.0d emit(Opcodes.DCONST_0 + cst.asInstanceOf[Int]) - } else { + else jmethod.visitLdcInsn(java.lang.Double.valueOf(cst)) - } - } // can-multi-thread - final def newarray(elem: BType): Unit = { - elem match { + final def newarray(elem: BType): Unit = + elem match case c: RefBType => /* phantom type at play in `Array(null)`, SI-1513. On the other hand, Array(()) has element type `scala.runtime.BoxedUnit` which isObject. */ jmethod.visitTypeInsn(Opcodes.ANEWARRAY, c.classOrArrayType) case _ => assert(elem.isNonVoidPrimitiveType) - val rand = { + val rand = // using `asm.Type.SHORT` instead of `BType.SHORT` because otherwise "warning: could not emit switch for @switch annotated match" - elem match { + elem match case BOOL => Opcodes.T_BOOLEAN case BYTE => Opcodes.T_BYTE case SHORT => Opcodes.T_SHORT @@ -385,11 +355,7 @@ trait BCodeIdiomatic { case LONG => Opcodes.T_LONG case FLOAT => Opcodes.T_FLOAT case DOUBLE => Opcodes.T_DOUBLE - } - } jmethod.visitIntInsn(Opcodes.NEWARRAY, rand) - } - } final def load( idx: Int, tk: BType): Unit = { emitVarInsn(Opcodes.ILOAD, idx, tk) } // can-multi-thread @@ -407,26 +373,21 @@ trait BCodeIdiomatic { final def rem(tk: BType): Unit = { emitPrimitive(JCodeMethodN.remOpcodes, tk) } // can-multi-thread // can-multi-thread - final def invokespecial(owner: String, name: String, desc: String, itf: Boolean): Unit = { + final def invokespecial(owner: String, name: String, desc: String, itf: Boolean): Unit = emitInvoke(Opcodes.INVOKESPECIAL, owner, name, desc, itf) - } // can-multi-thread - final def invokestatic(owner: String, name: String, desc: String, itf: Boolean): Unit = { + final def invokestatic(owner: String, name: String, desc: String, itf: Boolean): Unit = emitInvoke(Opcodes.INVOKESTATIC, owner, name, desc, itf) - } // can-multi-thread - final def 
invokeinterface(owner: String, name: String, desc: String): Unit = { + final def invokeinterface(owner: String, name: String, desc: String): Unit = emitInvoke(Opcodes.INVOKEINTERFACE, owner, name, desc, itf = true) - } // can-multi-thread - final def invokevirtual(owner: String, name: String, desc: String): Unit = { + final def invokevirtual(owner: String, name: String, desc: String): Unit = emitInvoke(Opcodes.INVOKEVIRTUAL, owner, name, desc, itf = false) - } - def emitInvoke(opcode: Int, owner: String, name: String, desc: String, itf: Boolean): Unit = { + def emitInvoke(opcode: Int, owner: String, name: String, desc: String, itf: Boolean): Unit = val node = new MethodInsnNode(opcode, owner, name, desc, itf) jmethod.instructions.add(node) - } // can-multi-thread @@ -436,160 +397,137 @@ trait BCodeIdiomatic { // can-multi-thread final def emitIF_ICMP(cond: TestOp, label: asm.Label): Unit = { jmethod.visitJumpInsn(cond.opcodeIFICMP(), label) } // can-multi-thread - final def emitIF_ACMP(cond: TestOp, label: asm.Label): Unit = { + final def emitIF_ACMP(cond: TestOp, label: asm.Label): Unit = assert((cond == EQ) || (cond == NE), cond) val opc = (if (cond == EQ) Opcodes.IF_ACMPEQ else Opcodes.IF_ACMPNE) jmethod.visitJumpInsn(opc, label) - } // can-multi-thread final def emitIFNONNULL(label: asm.Label): Unit = { jmethod.visitJumpInsn(Opcodes.IFNONNULL, label) } // can-multi-thread final def emitIFNULL (label: asm.Label): Unit = { jmethod.visitJumpInsn(Opcodes.IFNULL, label) } // can-multi-thread - final def emitRETURN(tk: BType): Unit = { + final def emitRETURN(tk: BType): Unit = if (tk == UNIT) { emit(Opcodes.RETURN) } else { emitTypeBased(JCodeMethodN.returnOpcodes, tk) } - } /* Emits one of tableswitch or lookoupswitch. 
* * can-multi-thread */ - final def emitSWITCH(keys: Array[Int], branches: Array[asm.Label], defaultBranch: asm.Label, minDensity: Double): Unit = { + final def emitSWITCH(keys: Array[Int], branches: Array[asm.Label], defaultBranch: asm.Label, minDensity: Double): Unit = assert(keys.length == branches.length) // For empty keys, it makes sense emitting LOOKUPSWITCH with defaultBranch only. // Similar to what javac emits for a switch statement consisting only of a default case. - if (keys.length == 0) { + if (keys.length == 0) jmethod.visitLookupSwitchInsn(defaultBranch, keys, branches) return - } // sort `keys` by increasing key, keeping `branches` in sync. TODO FIXME use quicksort var i = 1 - while (i < keys.length) { + while (i < keys.length) var j = 1 - while (j <= keys.length - i) { - if (keys(j) < keys(j - 1)) { + while (j <= keys.length - i) + if (keys(j) < keys(j - 1)) val tmp = keys(j) keys(j) = keys(j - 1) keys(j - 1) = tmp val tmpL = branches(j) branches(j) = branches(j - 1) branches(j - 1) = tmpL - } j += 1 - } i += 1 - } // check for duplicate keys to avoid "VerifyError: unsorted lookupswitch" (SI-6011) i = 1 - while (i < keys.length) { - if (keys(i-1) == keys(i)) { + while (i < keys.length) + if (keys(i-1) == keys(i)) abort("duplicate keys in SWITCH, can't pick arbitrarily one of them to evict, see SI-6011.") - } i += 1 - } val keyMin = keys(0) val keyMax = keys(keys.length - 1) - val isDenseEnough: Boolean = { + val isDenseEnough: Boolean = /* Calculate in long to guard against overflow. TODO what overflow? */ val keyRangeD: Double = (keyMax.asInstanceOf[Long] - keyMin + 1).asInstanceOf[Double] val klenD: Double = keys.length val kdensity: Double = (klenD / keyRangeD) kdensity >= minDensity - } - if (isDenseEnough) { + if (isDenseEnough) // use a table in which holes are filled with defaultBranch. 
val keyRange = (keyMax - keyMin + 1) val newBranches = new Array[asm.Label](keyRange) var oldPos = 0 var i = 0 - while (i < keyRange) { + while (i < keyRange) val key = keyMin + i; - if (keys(oldPos) == key) { + if (keys(oldPos) == key) newBranches(i) = branches(oldPos) oldPos += 1 - } else { + else newBranches(i) = defaultBranch - } i += 1 - } assert(oldPos == keys.length, "emitSWITCH") jmethod.visitTableSwitchInsn(keyMin, keyMax, defaultBranch, newBranches: _*) - } else { + else jmethod.visitLookupSwitchInsn(defaultBranch, keys, branches) - } - } // internal helpers -- not part of the public API of `jcode` // don't make private otherwise inlining will suffer // can-multi-thread - final def emitVarInsn(opc: Int, idx: Int, tk: BType): Unit = { + final def emitVarInsn(opc: Int, idx: Int, tk: BType): Unit = assert((opc == Opcodes.ILOAD) || (opc == Opcodes.ISTORE), opc) jmethod.visitVarInsn(tk.typedOpcode(opc), idx) - } // ---------------- array load and store ---------------- // can-multi-thread - final def emitTypeBased(opcs: Array[Int], tk: BType): Unit = { + final def emitTypeBased(opcs: Array[Int], tk: BType): Unit = assert(tk != UNIT, tk) - val opc = { + val opc = if (tk.isRef) { opcs(0) } - else if (tk.isIntSizedType) { - (tk: @unchecked) match { + else if (tk.isIntSizedType) + (tk: @unchecked) match case BOOL | BYTE => opcs(1) case SHORT => opcs(2) case CHAR => opcs(3) case INT => opcs(4) - } - } else { - (tk: @unchecked) match { + else + (tk: @unchecked) match case LONG => opcs(5) case FLOAT => opcs(6) case DOUBLE => opcs(7) - } - } - } emit(opc) - } // ---------------- primitive operations ---------------- // can-multi-thread - final def emitPrimitive(opcs: Array[Int], tk: BType): Unit = { - val opc = { + final def emitPrimitive(opcs: Array[Int], tk: BType): Unit = + val opc = // using `asm.Type.SHORT` instead of `BType.SHORT` because otherwise "warning: could not emit switch for @switch annotated match" - tk match { + tk match case LONG => opcs(1) case 
FLOAT => opcs(2) case DOUBLE => opcs(3) case _ => opcs(0) - } - } emit(opc) - } // can-multi-thread final def drop(tk: BType): Unit = { emit(if (tk.isWideType) Opcodes.POP2 else Opcodes.POP) } // can-multi-thread - final def dropMany(size: Int): Unit = { + final def dropMany(size: Int): Unit = var s = size while s >= 2 do emit(Opcodes.POP2) s -= 2 if s > 0 then emit(Opcodes.POP) - } // can-multi-thread final def dup(tk: BType): Unit = { emit(if (tk.isWideType) Opcodes.DUP2 else Opcodes.DUP) } @@ -597,25 +535,22 @@ trait BCodeIdiomatic { // ---------------- type checks and casts ---------------- // can-multi-thread - final def isInstance(tk: RefBType): Unit = { + final def isInstance(tk: RefBType): Unit = jmethod.visitTypeInsn(Opcodes.INSTANCEOF, tk.classOrArrayType) - } // can-multi-thread - final def checkCast(tk: RefBType): Unit = { + final def checkCast(tk: RefBType): Unit = // TODO ICode also requires: but that's too much, right? assert(!isBoxedType(tk), "checkcast on boxed type: " + tk) jmethod.visitTypeInsn(Opcodes.CHECKCAST, tk.classOrArrayType) - } - def abort(msg: String): Nothing = { + def abort(msg: String): Nothing = report.error(msg) throw new RuntimeException(msg) - } - } // end of class JCodeMethodN + // end of class JCodeMethodN /* Constant-valued val-members of JCodeMethodN at the companion object, so as to avoid re-initializing them multiple times. 
*/ - object JCodeMethodN { + object JCodeMethodN: import asm.Opcodes._ @@ -641,7 +576,7 @@ trait BCodeIdiomatic { val divOpcodes: Array[Int] = { Array(IDIV, LDIV, FDIV, DDIV) } val remOpcodes: Array[Int] = { Array(IREM, LREM, FREM, DREM) } - } // end of object JCodeMethodN + // end of object JCodeMethodN // ---------------- adapted from scalaPrimitives ---------------- @@ -650,9 +585,9 @@ trait BCodeIdiomatic { * * can-multi-thread */ - final def coercionFrom(code: Int): BType = { + final def coercionFrom(code: Int): BType = import ScalaPrimitivesOps._ - (code: @switch) match { + (code: @switch) match case B2B | B2C | B2S | B2I | B2L | B2F | B2D => BYTE case S2B | S2S | S2C | S2I | S2L | S2F | S2D => SHORT case C2B | C2S | C2C | C2I | C2L | C2F | C2D => CHAR @@ -660,16 +595,14 @@ trait BCodeIdiomatic { case L2B | L2S | L2C | L2I | L2L | L2F | L2D => LONG case F2B | F2S | F2C | F2I | F2L | F2F | F2D => FLOAT case D2B | D2S | D2C | D2I | D2L | D2F | D2D => DOUBLE - } - } /* If code is a coercion primitive, the result type. 
* * can-multi-thread */ - final def coercionTo(code: Int): BType = { + final def coercionTo(code: Int): BType = import ScalaPrimitivesOps._ - (code: @switch) match { + (code: @switch) match case B2B | C2B | S2B | I2B | L2B | F2B | D2B => BYTE case B2C | C2C | S2C | I2C | L2C | F2C | D2C => CHAR case B2S | C2S | S2S | I2S | L2S | F2S | D2S => SHORT @@ -677,20 +610,13 @@ trait BCodeIdiomatic { case B2L | C2L | S2L | I2L | L2L | F2L | D2L => LONG case B2F | C2F | S2F | I2F | L2F | F2F | D2F => FLOAT case B2D | C2D | S2D | I2D | L2D | F2D | D2D => DOUBLE - } - } - implicit class InsnIterMethodNode(mnode: asm.tree.MethodNode) { + implicit class InsnIterMethodNode(mnode: asm.tree.MethodNode): @`inline` final def foreachInsn(f: (asm.tree.AbstractInsnNode) => Unit): Unit = { mnode.instructions.foreachInsn(f) } - } - implicit class InsnIterInsnList(lst: asm.tree.InsnList) { + implicit class InsnIterInsnList(lst: asm.tree.InsnList): - @`inline` final def foreachInsn(f: (asm.tree.AbstractInsnNode) => Unit): Unit = { + @`inline` final def foreachInsn(f: (asm.tree.AbstractInsnNode) => Unit): Unit = val insnIter = lst.iterator() - while (insnIter.hasNext) { + while (insnIter.hasNext) f(insnIter.next()) - } - } - } -} diff --git a/compiler/src/dotty/tools/backend/jvm/BCodeSkelBuilder.scala b/compiler/src/dotty/tools/backend/jvm/BCodeSkelBuilder.scala index 0a11fb898b48..16a69e54520c 100644 --- a/compiler/src/dotty/tools/backend/jvm/BCodeSkelBuilder.scala +++ b/compiler/src/dotty/tools/backend/jvm/BCodeSkelBuilder.scala @@ -30,7 +30,7 @@ import dotty.tools.dotc.transform.SymUtils._ * @version 1.0 * */ -trait BCodeSkelBuilder extends BCodeHelpers { +trait BCodeSkelBuilder extends BCodeHelpers: import int.{_, given} import DottyBackendInterface.{symExtensions, _} import tpd._ @@ -78,7 +78,7 @@ trait BCodeSkelBuilder extends BCodeHelpers { with JAndroidBuilder with BCForwardersGen with BCPickles - with BCJGenSigGen { + with BCJGenSigGen: // Strangely I can't find this in the asm code 
255, but reserving 1 for "this" inline val MaximumJvmParameters = 254 @@ -93,15 +93,13 @@ trait BCodeSkelBuilder extends BCodeHelpers { /* ---------------- idiomatic way to ask questions to typer ---------------- */ - def paramTKs(app: Apply, take: Int = -1): List[BType] = app match { + def paramTKs(app: Apply, take: Int = -1): List[BType] = app match case Apply(fun, _) => - val funSym = fun.symbol - (funSym.info.firstParamTypes map toTypeKind) // this tracks mentioned inner classes (in innerClassBufferASM) - } + val funSym = fun.symbol + (funSym.info.firstParamTypes map toTypeKind) // this tracks mentioned inner classes (in innerClassBufferASM) - def symInfoTK(sym: Symbol): BType = { + def symInfoTK(sym: Symbol): BType = toTypeKind(sym.info) // this tracks mentioned inner classes (in innerClassBufferASM) - } def tpeTK(tree: Tree): BType = { toTypeKind(tree.tpe) } @@ -109,20 +107,20 @@ trait BCodeSkelBuilder extends BCodeHelpers { /* ---------------- helper utils for generating classes and fields ---------------- */ - def genPlainClass(cd0: TypeDef) = cd0 match { + def genPlainClass(cd0: TypeDef) = cd0 match case TypeDef(_, impl: Template) => - assert(cnode == null, "GenBCode detected nested methods.") + assert(cnode == null, "GenBCode detected nested methods.") - claszSymbol = cd0.symbol - isCZParcelable = isAndroidParcelableClass(claszSymbol) - isCZStaticModule = claszSymbol.isStaticModuleClass - thisName = internalName(claszSymbol) + claszSymbol = cd0.symbol + isCZParcelable = isAndroidParcelableClass(claszSymbol) + isCZStaticModule = claszSymbol.isStaticModuleClass + thisName = internalName(claszSymbol) - cnode = new ClassNode1() + cnode = new ClassNode1() - initJClass(cnode) + initJClass(cnode) - val cd = if (isCZStaticModule) { + val cd = if (isCZStaticModule) // Move statements from the primary constructor following the superclass constructor call to // a newly synthesised tree representing the "", which also assigns the MODULE$ field. 
// Because the assigments to both the module instance fields, and the fields of the module itself @@ -150,26 +148,26 @@ trait BCodeSkelBuilder extends BCodeHelpers { // // !!! Part of this logic is duplicated in JSCodeGen.genCompilationUnit - claszSymbol.info.decls.foreach { f => - if f.isField && !f.name.is(LazyBitMapName) && !f.name.is(LazyLocalName) then - f.setFlag(JavaStatic) - } + claszSymbol.info.decls.foreach { f => + if f.isField && !f.name.is(LazyBitMapName) && !f.name.is(LazyLocalName) then + f.setFlag(JavaStatic) + } - val (clinits, body) = impl.body.partition(stat => stat.isInstanceOf[DefDef] && stat.symbol.isStaticConstructor) + val (clinits, body) = impl.body.partition(stat => stat.isInstanceOf[DefDef] && stat.symbol.isStaticConstructor) - val (uptoSuperStats, remainingConstrStats) = splitAtSuper(impl.constr.rhs.asInstanceOf[Block].stats) - val clInitSymbol: TermSymbol = - if (clinits.nonEmpty) clinits.head.symbol.asTerm - else newSymbol( + val (uptoSuperStats, remainingConstrStats) = splitAtSuper(impl.constr.rhs.asInstanceOf[Block].stats) + val clInitSymbol: TermSymbol = + if (clinits.nonEmpty) clinits.head.symbol.asTerm + else newSymbol( claszSymbol, nme.STATIC_CONSTRUCTOR, JavaStatic | Method, MethodType(Nil)(_ => Nil, _ => defn.UnitType), privateWithin = NoSymbol, coord = claszSymbol.coord - ) + ) - val moduleField = newSymbol( + val moduleField = newSymbol( claszSymbol, str.MODULE_INSTANCE_FIELD.toTermName, JavaStatic | Final, @@ -178,72 +176,66 @@ trait BCodeSkelBuilder extends BCodeHelpers { coord = claszSymbol.coord ).entered - val thisMap = new TreeMap { - override def transform(tree: Tree)(using Context) = { - val tp = tree.tpe.substThis(claszSymbol.asClass, claszSymbol.sourceModule.termRef) - tree.withType(tp) match { - case tree: This if tree.symbol == claszSymbol => - ref(claszSymbol.sourceModule) - case tree => - super.transform(tree) - } - } - } + val thisMap = new TreeMap: + override def transform(tree: Tree)(using Context) = + val tp 
= tree.tpe.substThis(claszSymbol.asClass, claszSymbol.sourceModule.termRef) + tree.withType(tp) match + case tree: This if tree.symbol == claszSymbol => + ref(claszSymbol.sourceModule) + case tree => + super.transform(tree) + + def rewire(stat: Tree) = thisMap.transform(stat).changeOwner(claszSymbol.primaryConstructor, clInitSymbol) + + val callConstructor = New(claszSymbol.typeRef).select(claszSymbol.primaryConstructor).appliedToTermArgs(Nil) + val assignModuleField = Assign(ref(moduleField), callConstructor) + val remainingConstrStatsSubst = remainingConstrStats.map(rewire) + val clinit = clinits match + case (ddef: DefDef) :: _ => + cpy.DefDef(ddef)(rhs = Block(ddef.rhs :: assignModuleField :: remainingConstrStatsSubst, unitLiteral)) + case _ => + DefDef(clInitSymbol, Block(assignModuleField :: remainingConstrStatsSubst, unitLiteral)) - def rewire(stat: Tree) = thisMap.transform(stat).changeOwner(claszSymbol.primaryConstructor, clInitSymbol) + val constr2 = + val rhs = Block(uptoSuperStats, impl.constr.rhs.asInstanceOf[Block].expr) + cpy.DefDef(impl.constr)(rhs = rhs) - val callConstructor = New(claszSymbol.typeRef).select(claszSymbol.primaryConstructor).appliedToTermArgs(Nil) - val assignModuleField = Assign(ref(moduleField), callConstructor) - val remainingConstrStatsSubst = remainingConstrStats.map(rewire) - val clinit = clinits match { - case (ddef: DefDef) :: _ => - cpy.DefDef(ddef)(rhs = Block(ddef.rhs :: assignModuleField :: remainingConstrStatsSubst, unitLiteral)) - case _ => - DefDef(clInitSymbol, Block(assignModuleField :: remainingConstrStatsSubst, unitLiteral)) - } - - val constr2 = { - val rhs = Block(uptoSuperStats, impl.constr.rhs.asInstanceOf[Block].expr) - cpy.DefDef(impl.constr)(rhs = rhs) - } - - val impl2 = cpy.Template(impl)(constr = constr2, body = clinit :: body) - cpy.TypeDef(cd0)(rhs = impl2) - } else cd0 - - val hasStaticCtor = isCZStaticModule || cd.symbol.info.decls.exists(_.isStaticConstructor) - if (!hasStaticCtor && isCZParcelable) 
fabricateStaticInitAndroid() - - val optSerial: Option[Long] = - claszSymbol.getAnnotation(defn.SerialVersionUIDAnnot).flatMap { annot => - if (claszSymbol.is(Trait)) { - report.warning("@SerialVersionUID does nothing on a trait", annot.tree.sourcePos) - None - } else { - val vuid = annot.argumentConstant(0).map(_.longValue) - if (vuid.isEmpty) - report.error("The argument passed to @SerialVersionUID must be a constant", + val impl2 = cpy.Template(impl)(constr = constr2, body = clinit :: body) + cpy.TypeDef(cd0)(rhs = impl2) + else cd0 + + val hasStaticCtor = isCZStaticModule || cd.symbol.info.decls.exists(_.isStaticConstructor) + if (!hasStaticCtor && isCZParcelable) fabricateStaticInitAndroid() + + val optSerial: Option[Long] = + claszSymbol.getAnnotation(defn.SerialVersionUIDAnnot).flatMap { annot => + if (claszSymbol.is(Trait)) + report.warning("@SerialVersionUID does nothing on a trait", annot.tree.sourcePos) + None + else + val vuid = annot.argumentConstant(0).map(_.longValue) + if (vuid.isEmpty) + report.error("The argument passed to @SerialVersionUID must be a constant", annot.argument(0).getOrElse(annot.tree).sourcePos) - vuid + vuid } - } - if (optSerial.isDefined) { addSerialVUID(optSerial.get, cnode)} + if (optSerial.isDefined) { addSerialVUID(optSerial.get, cnode)} - addClassFields() - gen(cd.rhs) + addClassFields() + gen(cd.rhs) - if (AsmUtils.traceClassEnabled && cnode.name.contains(AsmUtils.traceClassPattern)) - AsmUtils.traceClass(cnode) + if (AsmUtils.traceClassEnabled && cnode.name.contains(AsmUtils.traceClassPattern)) + AsmUtils.traceClass(cnode) - cnode.innerClasses - assert(cd.symbol == claszSymbol, "Someone messed up BCodePhase.claszSymbol during genPlainClass().") + cnode.innerClasses + assert(cd.symbol == claszSymbol, "Someone messed up BCodePhase.claszSymbol during genPlainClass().") - } // end of method genPlainClass() + // end of method genPlainClass() /* * must-single-thread */ - private def initJClass(jclass: asm.ClassVisitor): Unit = 
{ + private def initJClass(jclass: asm.ClassVisitor): Unit = val ps = claszSymbol.info.parents val superClass: String = if (ps.isEmpty) ObjectRef.internalName else internalName(ps.head.typeSymbol) @@ -275,45 +267,39 @@ trait BCodeSkelBuilder extends BCodeHelpers { thisName, thisSignature, superClass, interfaceNames.toArray) - if (emitSource) { + if (emitSource) cnode.visitSource(cunit.source.file.name, null /* SourceDebugExtension */) - } - enclosingMethodAttribute(claszSymbol, internalName, asmMethodType(_).descriptor) match { + enclosingMethodAttribute(claszSymbol, internalName, asmMethodType(_).descriptor) match case Some(EnclosingMethodEntry(className, methodName, methodDescriptor)) => cnode.visitOuterClass(className, methodName, methodDescriptor) case _ => () - } val ssa = None // TODO: inlined form `getAnnotPickle(thisName, claszSymbol)`. Should something be done on Dotty? cnode.visitAttribute(if (ssa.isDefined) pickleMarkerLocal else pickleMarkerForeign) emitAnnotations(cnode, claszSymbol.annotations ++ ssa) - if (!isCZStaticModule && !isCZParcelable) { + if (!isCZStaticModule && !isCZParcelable) val skipStaticForwarders = (claszSymbol.is(Module) || ctx.settings.XnoForwarders.value) - if (!skipStaticForwarders) { + if (!skipStaticForwarders) val lmoc = claszSymbol.companionModule // add static forwarders if there are no name conflicts; see bugs #363 and #1735 - if (lmoc != NoSymbol) { + if (lmoc != NoSymbol) // it must be a top level class (name contains no $s) val isCandidateForForwarders = (lmoc.is(Module)) && lmoc.isStatic - if (isCandidateForForwarders) { + if (isCandidateForForwarders) report.log(s"Adding static forwarders from '$claszSymbol' to implementations in '$lmoc'") addForwarders(cnode, thisName, lmoc.moduleClass) - } - } - } - } // the invoker is responsible for adding a class-static constructor. 
- } // end of method initJClass + // end of method initJClass /* * must-single-thread */ - private def fabricateStaticInitAndroid(): Unit = { + private def fabricateStaticInitAndroid(): Unit = val clinit: asm.MethodVisitor = cnode.visitMethod( GenBCodeOps.PublicStatic, // TODO confirm whether we really don't want ACC_SYNTHETIC nor ACC_DEPRECATED @@ -329,9 +315,8 @@ trait BCodeSkelBuilder extends BCodeHelpers { clinit.visitInsn(asm.Opcodes.RETURN) clinit.visitMaxs(0, 0) // just to follow protocol, dummy arguments clinit.visitEnd() - } - def addClassFields(): Unit = { + def addClassFields(): Unit = /* Non-method term members are fields, except for module members. Module * members can only happen on .NET (no flatten) for inner traits. There, * a module symbol is generated (transformInfo in mixin) which is used @@ -339,7 +324,7 @@ trait BCodeSkelBuilder extends BCodeHelpers { * backend emits them as static). * No code is needed for this module symbol. */ - for (f <- claszSymbol.info.decls.filter(p => p.isTerm && !p.is(Method))) { + for (f <- claszSymbol.info.decls.filter(p => p.isTerm && !p.is(Method))) val javagensig = getGenericSignature(f, claszSymbol) val flags = javaFieldFlags(f) @@ -355,9 +340,8 @@ trait BCodeSkelBuilder extends BCodeHelpers { ) cnode.fields.add(jfield) emitAnnotations(jfield, f.annotations) - } - } // end of method addClassFields() + // end of method addClassFields() // current method var mnode: MethodNode1 = null @@ -373,9 +357,8 @@ trait BCodeSkelBuilder extends BCodeHelpers { // line numbers var lastEmittedLineNr = -1 - object bc extends JCodeMethodN { + object bc extends JCodeMethodN: override def jmethod = PlainSkelBuilder.this.mnode - } /* ---------------- Part 1 of program points, ie Labels in the ASM world ---------------- */ @@ -385,18 +368,16 @@ trait BCodeSkelBuilder extends BCodeHelpers { * corresponding expected type. The `LoadDestination` can never be `FallThrough` here. 
*/ var jumpDest: immutable.Map[ /* Labeled */ Symbol, (BType, LoadDestination) ] = null - def registerJumpDest(labelSym: Symbol, expectedType: BType, dest: LoadDestination): Unit = { + def registerJumpDest(labelSym: Symbol, expectedType: BType, dest: LoadDestination): Unit = assert(labelSym.is(Label), s"trying to register a jump-dest for a non-label symbol, at: ${labelSym.span}") assert(dest != LoadDestination.FallThrough, s"trying to register a FallThrough dest for label, at: ${labelSym.span}") assert(!jumpDest.contains(labelSym), s"trying to register a second jump-dest for label, at: ${labelSym.span}") jumpDest += (labelSym -> (expectedType, dest)) - } - def findJumpDest(labelSym: Symbol): (BType, LoadDestination) = { + def findJumpDest(labelSym: Symbol): (BType, LoadDestination) = assert(labelSym.is(Label), s"trying to map a non-label symbol to an asm.Label, at: ${labelSym.span}") jumpDest.getOrElse(labelSym, { abort(s"unknown label symbol, for label at: ${labelSym.span}") }) - } /* * A program point may be lexically nested (at some depth) @@ -428,16 +409,13 @@ trait BCodeSkelBuilder extends BCodeHelpers { * emitted for that purpose as described in `genLoadTry()` and `genSynchronized()`. 
*/ var cleanups: List[asm.Label] = Nil - def registerCleanup(finCleanup: asm.Label): Unit = { + def registerCleanup(finCleanup: asm.Label): Unit = if (finCleanup != null) { cleanups = finCleanup :: cleanups } - } - def unregisterCleanup(finCleanup: asm.Label): Unit = { - if (finCleanup != null) { + def unregisterCleanup(finCleanup: asm.Label): Unit = + if (finCleanup != null) assert(cleanups.head eq finCleanup, s"Bad nesting of cleanup operations: $cleanups trying to unregister: $finCleanup") cleanups = cleanups.tail - } - } /* ---------------- local variables and params ---------------- */ @@ -453,16 +431,15 @@ trait BCodeSkelBuilder extends BCodeHelpers { * { val y = "a" } * } */ - object locals { + object locals: private val slots = mutable.AnyRefMap.empty[Symbol, Local] // (local-or-param-sym -> Local(BType, name, idx, isSynth)) private var nxtIdx = -1 // next available index for local-var - def reset(isStaticMethod: Boolean): Unit = { + def reset(isStaticMethod: Boolean): Unit = slots.clear() nxtIdx = if (isStaticMethod) 0 else 1 - } def contains(locSym: Symbol): Boolean = { slots.contains(locSym) } @@ -471,21 +448,18 @@ trait BCodeSkelBuilder extends BCodeHelpers { /* Make a fresh local variable, ensuring a unique name. * The invoker must make sure inner classes are tracked for the sym's tpe. */ - def makeLocal(tk: BType, name: String, tpe: Type, pos: Span): Symbol = { + def makeLocal(tk: BType, name: String, tpe: Type, pos: Span): Symbol = val locSym = newSymbol(methSymbol, name.toTermName, Synthetic, tpe, NoSymbol, pos) makeLocal(locSym, tk) locSym - } - def makeLocal(locSym: Symbol): Local = { + def makeLocal(locSym: Symbol): Local = makeLocal(locSym, symInfoTK(locSym)) - } - def getOrMakeLocal(locSym: Symbol): Local = { + def getOrMakeLocal(locSym: Symbol): Local = // `getOrElse` below has the same effect as `getOrElseUpdate` because `makeLocal()` adds an entry to the `locals` map. 
slots.getOrElse(locSym, makeLocal(locSym)) - } def reuseLocal(sym: Symbol, loc: Local): Unit = val existing = slots.put(sym, loc) @@ -495,7 +469,7 @@ trait BCodeSkelBuilder extends BCodeHelpers { def reuseThisSlot(sym: Symbol): Unit = reuseLocal(sym, Local(symInfoTK(sym), sym.javaSimpleName, 0, sym.is(Synthetic))) - private def makeLocal(sym: Symbol, tk: BType): Local = { + private def makeLocal(sym: Symbol, tk: BType): Local = assert(nxtIdx != -1, "not a valid start index") val loc = Local(tk, sym.javaSimpleName, nxtIdx, sym.is(Synthetic)) val existing = slots.put(sym, loc) @@ -504,7 +478,6 @@ trait BCodeSkelBuilder extends BCodeHelpers { assert(tk.size > 0, "makeLocal called for a symbol whose type is Unit.") nxtIdx += tk.size loc - } def makeTempLocal(tk: BType): Local = assert(nxtIdx != -1, "not a valid start index") @@ -514,17 +487,14 @@ trait BCodeSkelBuilder extends BCodeHelpers { loc // not to be confused with `fieldStore` and `fieldLoad` which also take a symbol but a field-symbol. - def store(locSym: Symbol): Unit = { + def store(locSym: Symbol): Unit = val Local(tk, _, idx, _) = slots(locSym) bc.store(idx, tk) - } - def load(locSym: Symbol): Unit = { + def load(locSym: Symbol): Unit = val Local(tk, _, idx, _) = slots(locSym) bc.load(idx, tk) - } - } /* ---------------- Part 2 of program points, ie Labels in the ASM world ---------------- */ @@ -533,51 +503,42 @@ trait BCodeSkelBuilder extends BCodeHelpers { // helpers around program-points. 
def lastInsn: asm.tree.AbstractInsnNode = mnode.instructions.getLast - def currProgramPoint(): asm.Label = { - lastInsn match { + def currProgramPoint(): asm.Label = + lastInsn match case labnode: asm.tree.LabelNode => labnode.getLabel case _ => val pp = new asm.Label mnode visitLabel pp pp - } - } - def markProgramPoint(lbl: asm.Label): Unit = { + def markProgramPoint(lbl: asm.Label): Unit = val skip = (lbl == null) || isAtProgramPoint(lbl) if (!skip) { mnode visitLabel lbl } - } - def isAtProgramPoint(lbl: asm.Label): Boolean = { - def getNonLineNumberNode(a: asm.tree.AbstractInsnNode): asm.tree.AbstractInsnNode = a match { + def isAtProgramPoint(lbl: asm.Label): Boolean = + def getNonLineNumberNode(a: asm.tree.AbstractInsnNode): asm.tree.AbstractInsnNode = a match case a: asm.tree.LineNumberNode => getNonLineNumberNode(a.getPrevious) // line numbers aren't part of code itself case _ => a - } (getNonLineNumberNode(lastInsn) match { case labnode: asm.tree.LabelNode => (labnode.getLabel == lbl); case _ => false } ) - } - def lineNumber(tree: Tree): Unit = { + def lineNumber(tree: Tree): Unit = @tailrec - def getNonLabelNode(a: asm.tree.AbstractInsnNode): asm.tree.AbstractInsnNode = a match { + def getNonLabelNode(a: asm.tree.AbstractInsnNode): asm.tree.AbstractInsnNode = a match case a: asm.tree.LabelNode => getNonLabelNode(a.getPrevious) case _ => a - } if (!emitLines || !tree.span.exists) return; val nr = ctx.source.offsetToLine(tree.span.point) + 1 - if (nr != lastEmittedLineNr) { + if (nr != lastEmittedLineNr) lastEmittedLineNr = nr - getNonLabelNode(lastInsn) match { + getNonLabelNode(lastInsn) match case lnn: asm.tree.LineNumberNode => // overwrite previous landmark as no instructions have been emitted for it lnn.line = nr case _ => mnode.visitLineNumber(nr, currProgramPoint()) - } - } - } // on entering a method - def resetMethodBookkeeping(dd: DefDef) = { + def resetMethodBookkeeping(dd: DefDef) = val rhs = dd.rhs locals.reset(isStaticMethod = 
methSymbol.isStaticMember) jumpDest = immutable.Map.empty @@ -592,12 +553,11 @@ trait BCodeSkelBuilder extends BCodeHelpers { stackHeight = 0 lastEmittedLineNr = -1 - } /* ---------------- top-down traversal invoking ASM Tree API along the way ---------------- */ - def gen(tree: Tree): Unit = { - tree match { + def gen(tree: Tree): Unit = + tree match case tpd.EmptyTree => () case ValDef(name, tpt, rhs) => () // fields are added in `genPlainClass()`, via `addClassFields()` @@ -638,13 +598,11 @@ trait BCodeSkelBuilder extends BCodeHelpers { body foreach gen case _ => abort(s"Illegal tree in gen: $tree") - } - } /* * must-single-thread */ - def initJMethod(flags: Int, params: List[Symbol]): Unit = { + def initJMethod(flags: Int, params: List[Symbol]): Unit = val jgensig = getGenericSignature(methSymbol, claszSymbol) val (excs, others) = methSymbol.annotations.partition(_.symbol eq defn.ThrowsAnnot) @@ -669,7 +627,7 @@ trait BCodeSkelBuilder extends BCodeHelpers { emitParamNames(mnode, params) emitParamAnnotations(mnode, params.map(_.annotations)) - } // end of method initJMethod + // end of method initJMethod private def genTraitConstructorDefDef(dd: DefDef): Unit = val statifiedDef = makeStatifiedDefDef(dd) @@ -742,7 +700,7 @@ trait BCodeSkelBuilder extends BCodeHelpers { info = info ).asTerm - def genDefDef(dd: DefDef): Unit = { + def genDefDef(dd: DefDef): Unit = val rhs = dd.rhs val vparamss = dd.termParamss // the only method whose implementation is not emitted: getClass() @@ -763,11 +721,10 @@ trait BCodeSkelBuilder extends BCodeHelpers { for (p <- params) { locals.makeLocal(p.symbol) } // debug assert((params.map(p => locals(p.symbol).tk)) == asmMethodType(methSymbol).getArgumentTypes.toList, "debug") - if (params.size > MaximumJvmParameters) { + if (params.size > MaximumJvmParameters) // SI-7324 report.error(em"Platform restriction: a parameter list's length cannot exceed $MaximumJvmParameters.", ctx.source.atSpan(methSymbol.span)) return - } val isNative = 
methSymbol.hasAnnotation(NativeAttr) val isAbstractMethod = (methSymbol.is(Deferred) || (methSymbol.owner.isInterface && ((methSymbol.is(Deferred)) || methSymbol.isClassConstructor))) @@ -783,7 +740,7 @@ trait BCodeSkelBuilder extends BCodeHelpers { initJMethod(flags, paramSyms) - if (!isAbstractMethod && !isNative) { + if (!isAbstractMethod && !isNative) // #14773 Reuse locals slots for tailrec-generated mutable vars val trimmedRhs: Tree = @tailrec def loop(stats: List[Tree]): List[Tree] = @@ -813,7 +770,7 @@ trait BCodeSkelBuilder extends BCodeHelpers { rhs end trimmedRhs - def emitNormalMethodBody(): Unit = { + def emitNormalMethodBody(): Unit = val veryFirstProgramPoint = currProgramPoint() if trimmedRhs == tpd.EmptyTree then @@ -826,11 +783,11 @@ trait BCodeSkelBuilder extends BCodeHelpers { else genLoadTo(trimmedRhs, returnType, LoadDestination.Return) - if (emitVars) { + if (emitVars) // add entries to LocalVariableTable JVM attribute val onePastLastProgramPoint = currProgramPoint() val hasStaticBitSet = ((flags & asm.Opcodes.ACC_STATIC) != 0) - if (!hasStaticBitSet) { + if (!hasStaticBitSet) mnode.visitLocalVariable( "this", "L" + thisName + ";", @@ -839,42 +796,37 @@ trait BCodeSkelBuilder extends BCodeHelpers { onePastLastProgramPoint, 0 ) - } for (p <- params) { emitLocalVarScope(p.symbol, veryFirstProgramPoint, onePastLastProgramPoint, force = true) } - } if (isMethSymStaticCtor) { appendToStaticCtor(dd) } - } // end of emitNormalMethodBody() + // end of emitNormalMethodBody() lineNumber(rhs) emitNormalMethodBody() // Note we don't invoke visitMax, thus there are no FrameNode among mnode.instructions. // The only non-instruction nodes to be found are LabelNode and LineNumberNode. 
- } if (AsmUtils.traceMethodEnabled && mnode.name.contains(AsmUtils.traceMethodPattern)) AsmUtils.traceMethod(mnode) mnode = null - } // end of method genDefDef() + // end of method genDefDef() /* * must-single-thread * * TODO document, explain interplay with `fabricateStaticInitAndroid()` */ - private def appendToStaticCtor(dd: DefDef): Unit = { + private def appendToStaticCtor(dd: DefDef): Unit = def insertBefore( location: asm.tree.AbstractInsnNode, i0: asm.tree.AbstractInsnNode, - i1: asm.tree.AbstractInsnNode): Unit = { - if (i0 != null) { + i1: asm.tree.AbstractInsnNode): Unit = + if (i0 != null) mnode.instructions.insertBefore(location, i0.clone(null)) mnode.instructions.insertBefore(location, i1.clone(null)) - } - } // collect all return instructions var rets: List[asm.tree.AbstractInsnNode] = Nil @@ -884,7 +836,7 @@ trait BCodeSkelBuilder extends BCodeHelpers { var insnParcA: asm.tree.AbstractInsnNode = null var insnParcB: asm.tree.AbstractInsnNode = null // android creator code - if (isCZParcelable) { + if (isCZParcelable) // add a static field ("CREATOR") to this class to cache android.os.Parcelable$Creator val andrFieldDescr = classBTypeFromSymbol(AndroidCreatorClass).descriptor cnode.visitField( @@ -902,24 +854,18 @@ trait BCodeSkelBuilder extends BCodeHelpers { insnParcA = new asm.tree.MethodInsnNode(asm.Opcodes.INVOKESTATIC, jowner, jname, jtype, false) // PUTSTATIC `thisName`.CREATOR; insnParcB = new asm.tree.FieldInsnNode(asm.Opcodes.PUTSTATIC, thisName, "CREATOR", andrFieldDescr) - } // insert a few instructions for initialization before each return instruction - for(r <- rets) { + for(r <- rets) insertBefore(r, insnParcA, insnParcB) - } - } - def emitLocalVarScope(sym: Symbol, start: asm.Label, end: asm.Label, force: Boolean = false): Unit = { + def emitLocalVarScope(sym: Symbol, start: asm.Label, end: asm.Label, force: Boolean = false): Unit = val Local(tk, name, idx, isSynth) = locals(sym) - if (force || !isSynth) { + if (force || !isSynth) 
mnode.visitLocalVariable(name, tk.descriptor, null, start, end, idx) - } - } def genLoadTo(tree: Tree, expectedType: BType, dest: LoadDestination): Unit - } // end of class PlainSkelBuilder + // end of class PlainSkelBuilder -} diff --git a/compiler/src/dotty/tools/backend/jvm/BCodeSyncAndTry.scala b/compiler/src/dotty/tools/backend/jvm/BCodeSyncAndTry.scala index b5ed27511e7e..b2ae15a314cf 100644 --- a/compiler/src/dotty/tools/backend/jvm/BCodeSyncAndTry.scala +++ b/compiler/src/dotty/tools/backend/jvm/BCodeSyncAndTry.scala @@ -18,7 +18,7 @@ import dotty.tools.dotc.ast.tpd * @version 1.0 * */ -trait BCodeSyncAndTry extends BCodeBodyBuilder { +trait BCodeSyncAndTry extends BCodeBodyBuilder: import int.given import tpd._ import bTypes._ @@ -26,23 +26,23 @@ trait BCodeSyncAndTry extends BCodeBodyBuilder { /* * Functionality to lower `synchronized` and `try` expressions. */ - abstract class SyncAndTryBuilder(cunit: CompilationUnit) extends PlainBodyBuilder(cunit) { + abstract class SyncAndTryBuilder(cunit: CompilationUnit) extends PlainBodyBuilder(cunit): - def genSynchronized(tree: Apply, expectedType: BType): BType = (tree: @unchecked) match { + def genSynchronized(tree: Apply, expectedType: BType): BType = (tree: @unchecked) match case Apply(TypeApply(fun, _), args) => - val monitor = locals.makeLocal(ObjectRef, "monitor", defn.ObjectType, tree.span) - val monCleanup = new asm.Label + val monitor = locals.makeLocal(ObjectRef, "monitor", defn.ObjectType, tree.span) + val monCleanup = new asm.Label // if the synchronized block returns a result, store it in a local variable. // Just leaving it on the stack is not valid in MSIL (stack is cleaned when leaving try-blocks). 
- val hasResult = (expectedType != UNIT) - val monitorResult: Symbol = if (hasResult) locals.makeLocal(tpeTK(args.head), "monitorResult", defn.ObjectType, tree.span) else null + val hasResult = (expectedType != UNIT) + val monitorResult: Symbol = if (hasResult) locals.makeLocal(tpeTK(args.head), "monitorResult", defn.ObjectType, tree.span) else null /* ------ (1) pushing and entering the monitor, also keeping a reference to it in a local var. ------ */ - genLoadQualifier(fun) - bc dup ObjectRef - locals.store(monitor) - emit(asm.Opcodes.MONITORENTER) + genLoadQualifier(fun) + bc dup ObjectRef + locals.store(monitor) + emit(asm.Opcodes.MONITORENTER) /* ------ (2) Synchronized block. * Reached by fall-through from (1). @@ -51,24 +51,24 @@ trait BCodeSyncAndTry extends BCodeBodyBuilder { * (2.b) whatever protects the whole synchronized expression. * ------ */ - val startProtected = currProgramPoint() - registerCleanup(monCleanup) - genLoad(args.head, expectedType /* toTypeKind(tree.tpe.resultType) */) - unregisterCleanup(monCleanup) - if (hasResult) { locals.store(monitorResult) } - nopIfNeeded(startProtected) - val endProtected = currProgramPoint() + val startProtected = currProgramPoint() + registerCleanup(monCleanup) + genLoad(args.head, expectedType /* toTypeKind(tree.tpe.resultType) */) + unregisterCleanup(monCleanup) + if (hasResult) { locals.store(monitorResult) } + nopIfNeeded(startProtected) + val endProtected = currProgramPoint() /* ------ (3) monitor-exit after normal, non-early-return, termination of (2). * Reached by fall-through from (2). * Protected by whatever protects the whole synchronized expression. 
* ------ */ - locals.load(monitor) - emit(asm.Opcodes.MONITOREXIT) - if (hasResult) { locals.load(monitorResult) } - val postHandler = new asm.Label - bc goTo postHandler + locals.load(monitor) + emit(asm.Opcodes.MONITOREXIT) + if (hasResult) { locals.load(monitorResult) } + val postHandler = new asm.Label + bc goTo postHandler /* ------ (4) exception-handler version of monitor-exit code. * Reached upon abrupt termination of (2). @@ -77,44 +77,41 @@ trait BCodeSyncAndTry extends BCodeBodyBuilder { * Important not to use j/l/Throwable which dooms the method to a life of interpretation! (SD-233) * ------ */ - protect(startProtected, endProtected, currProgramPoint(), null) - locals.load(monitor) - emit(asm.Opcodes.MONITOREXIT) - emit(asm.Opcodes.ATHROW) + protect(startProtected, endProtected, currProgramPoint(), null) + locals.load(monitor) + emit(asm.Opcodes.MONITOREXIT) + emit(asm.Opcodes.ATHROW) /* ------ (5) cleanup version of monitor-exit code. * Reached upon early-return from (2). * Protected by whatever protects the whole synchronized expression. * ------ */ - if (shouldEmitCleanup) { - markProgramPoint(monCleanup) - locals.load(monitor) - emit(asm.Opcodes.MONITOREXIT) - pendingCleanups() - } + if (shouldEmitCleanup) + markProgramPoint(monCleanup) + locals.load(monitor) + emit(asm.Opcodes.MONITOREXIT) + pendingCleanups() /* ------ (6) normal exit of the synchronized expression. * Reached after normal, non-early-return, termination of (3). * Protected by whatever protects the whole synchronized expression. * ------ */ - mnode visitLabel postHandler + mnode visitLabel postHandler - lineNumber(tree) + lineNumber(tree) - expectedType - } + expectedType /* * Detects whether no instructions have been emitted since label `lbl` and if so emits a NOP. * Useful to avoid emitting an empty try-block being protected by exception handlers, * which results in "java.lang.ClassFormatError: Illegal exception table range". See SI-6102. 
*/ - def nopIfNeeded(lbl: asm.Label): Unit = { + def nopIfNeeded(lbl: asm.Label): Unit = val noInstructionEmitted = isAtProgramPoint(lbl) if (noInstructionEmitted) { emit(asm.Opcodes.NOP) } - } /* * Emitting try-catch is easy, emitting try-catch-finally not quite so. @@ -177,18 +174,16 @@ trait BCodeSyncAndTry extends BCodeBodyBuilder { * - "exception-handler-version-of-finally-block" respectively. * */ - def genLoadTry(tree: Try): BType = tree match { + def genLoadTry(tree: Try): BType = tree match case Try(block, catches, finalizer) => - val kind = tpeTK(tree) + val kind = tpeTK(tree) - val caseHandlers: List[EHClause] = - for (CaseDef(pat, _, caseBody) <- catches) yield { - pat match { - case Typed(Ident(nme.WILDCARD), tpt) => NamelessEH(tpeTK(tpt).asClassBType, caseBody) - case Ident(nme.WILDCARD) => NamelessEH(jlThrowableRef, caseBody) - case Bind(_, _) => BoundEH (pat.symbol, caseBody) - } - } + val caseHandlers: List[EHClause] = + for (CaseDef(pat, _, caseBody) <- catches) yield + pat match + case Typed(Ident(nme.WILDCARD), tpt) => NamelessEH(tpeTK(tpt).asClassBType, caseBody) + case Ident(nme.WILDCARD) => NamelessEH(jlThrowableRef, caseBody) + case Bind(_, _) => BoundEH (pat.symbol, caseBody) // ------ (0) locals used later ------ @@ -201,26 +196,26 @@ trait BCodeSyncAndTry extends BCodeBodyBuilder { * The name choice emphasizes that the code section lies "after all exception handlers", * where "all exception handlers" includes those derived from catch-clauses as well as from finally-blocks. */ - val postHandlers = new asm.Label + val postHandlers = new asm.Label - val hasFinally = (finalizer != tpd.EmptyTree) + val hasFinally = (finalizer != tpd.EmptyTree) /* * used in the finally-clause reached via fall-through from try-catch, if any. 
*/ - val guardResult = hasFinally && (kind != UNIT) && mayCleanStack(finalizer) + val guardResult = hasFinally && (kind != UNIT) && mayCleanStack(finalizer) /* * please notice `tmp` has type tree.tpe, while `earlyReturnVar` has the method return type. * Because those two types can be different, dedicated vars are needed. */ - val tmp = if (guardResult) locals.makeLocal(tpeTK(tree), "tmp", tree.tpe, tree.span) else null + val tmp = if (guardResult) locals.makeLocal(tpeTK(tree), "tmp", tree.tpe, tree.span) else null /* * upon early return from the try-body or one of its EHs (but not the EH-version of the finally-clause) * AND hasFinally, a cleanup is needed. */ - val finCleanup = if (hasFinally) new asm.Label else null + val finCleanup = if (hasFinally) new asm.Label else null /* ------ (1) try-block, protected by: * (1.a) the EHs due to case-clauses, emitted in (2), @@ -229,13 +224,13 @@ trait BCodeSyncAndTry extends BCodeBodyBuilder { * ------ */ - val startTryBody = currProgramPoint() - registerCleanup(finCleanup) - genLoad(block, kind) - unregisterCleanup(finCleanup) - nopIfNeeded(startTryBody) - val endTryBody = currProgramPoint() - bc goTo postHandlers + val startTryBody = currProgramPoint() + registerCleanup(finCleanup) + genLoad(block, kind) + unregisterCleanup(finCleanup) + nopIfNeeded(startTryBody) + val endTryBody = currProgramPoint() + bc goTo postHandlers /** * A return within a `try` or `catch` block where a `finally` is present ("early return") @@ -258,12 +253,11 @@ trait BCodeSyncAndTry extends BCodeBodyBuilder { * here makes sure that `shouldEmitCleanup` is only propagated outwards, not inwards to * nested `finally` blocks. 
*/ - def withFreshCleanupScope(body: => Unit) = { - val savedShouldEmitCleanup = shouldEmitCleanup - shouldEmitCleanup = false - body - shouldEmitCleanup = savedShouldEmitCleanup || shouldEmitCleanup - } + def withFreshCleanupScope(body: => Unit) = + val savedShouldEmitCleanup = shouldEmitCleanup + shouldEmitCleanup = false + body + shouldEmitCleanup = savedShouldEmitCleanup || shouldEmitCleanup /* ------ (2) One EH for each case-clause (this does not include the EH-version of the finally-clause) * An EH in (2) is reached upon abrupt termination of (1). @@ -273,44 +267,42 @@ trait BCodeSyncAndTry extends BCodeBodyBuilder { * ------ */ - for (ch <- caseHandlers) withFreshCleanupScope { + for (ch <- caseHandlers) withFreshCleanupScope: // (2.a) emit case clause proper - val startHandler = currProgramPoint() - var endHandler: asm.Label = null - var excType: ClassBType = null - registerCleanup(finCleanup) - ch match { - case NamelessEH(typeToDrop, caseBody) => - bc drop typeToDrop - genLoad(caseBody, kind) // adapts caseBody to `kind`, thus it can be stored, if `guardResult`, in `tmp`. - nopIfNeeded(startHandler) - endHandler = currProgramPoint() - excType = typeToDrop - - case BoundEH (patSymbol, caseBody) => + val startHandler = currProgramPoint() + var endHandler: asm.Label = null + var excType: ClassBType = null + registerCleanup(finCleanup) + ch match + case NamelessEH(typeToDrop, caseBody) => + bc drop typeToDrop + genLoad(caseBody, kind) // adapts caseBody to `kind`, thus it can be stored, if `guardResult`, in `tmp`. + nopIfNeeded(startHandler) + endHandler = currProgramPoint() + excType = typeToDrop + + case BoundEH (patSymbol, caseBody) => // test/files/run/contrib674.scala , a local-var already exists for patSymbol. // rather than creating on first-access, we do it right away to emit debug-info for the created local var. 
- val Local(patTK, _, patIdx, _) = locals.getOrMakeLocal(patSymbol) - bc.store(patIdx, patTK) - genLoad(caseBody, kind) - nopIfNeeded(startHandler) - endHandler = currProgramPoint() - emitLocalVarScope(patSymbol, startHandler, endHandler) - excType = patTK.asClassBType - } - unregisterCleanup(finCleanup) + val Local(patTK, _, patIdx, _) = locals.getOrMakeLocal(patSymbol) + bc.store(patIdx, patTK) + genLoad(caseBody, kind) + nopIfNeeded(startHandler) + endHandler = currProgramPoint() + emitLocalVarScope(patSymbol, startHandler, endHandler) + excType = patTK.asClassBType + unregisterCleanup(finCleanup) // (2.b) mark the try-body as protected by this case clause. - protect(startTryBody, endTryBody, startHandler, excType) + protect(startTryBody, endTryBody, startHandler, excType) // (2.c) emit jump to the program point where the finally-clause-for-normal-exit starts, or in effect `after` if no finally-clause was given. - bc goTo postHandlers + bc goTo postHandlers - } // Need to save the state of `shouldEmitCleanup` at this point: while emitting the first // version of the `finally` block below, the variable may become true. But this does not mean // that we need a cleanup version for the current block, only for the enclosing ones. - val currentFinallyBlockNeedsCleanup = shouldEmitCleanup + val currentFinallyBlockNeedsCleanup = shouldEmitCleanup /* ------ (3.A) The exception-handler-version of the finally-clause. * Reached upon abrupt termination of (1) or one of the EHs in (2). @@ -320,16 +312,15 @@ trait BCodeSyncAndTry extends BCodeBodyBuilder { // a note on terminology: this is not "postHandlers", despite appearances. // "postHandlers" as in the source-code view. And from that perspective, both (3.A) and (3.B) are invisible implementation artifacts. - if (hasFinally) withFreshCleanupScope { - nopIfNeeded(startTryBody) - val finalHandler = currProgramPoint() // version of the finally-clause reached via unhandled exception. 
- protect(startTryBody, finalHandler, finalHandler, null) - val Local(eTK, _, eIdx, _) = locals(locals.makeLocal(jlThrowableRef, "exc", defn.ThrowableType, finalizer.span)) - bc.store(eIdx, eTK) - emitFinalizer(finalizer, null, isDuplicate = true) - bc.load(eIdx, eTK) - emit(asm.Opcodes.ATHROW) - } + if (hasFinally) withFreshCleanupScope: + nopIfNeeded(startTryBody) + val finalHandler = currProgramPoint() // version of the finally-clause reached via unhandled exception. + protect(startTryBody, finalHandler, finalHandler, null) + val Local(eTK, _, eIdx, _) = locals(locals.makeLocal(jlThrowableRef, "exc", defn.ThrowableType, finalizer.span)) + bc.store(eIdx, eTK) + emitFinalizer(finalizer, null, isDuplicate = true) + bc.load(eIdx, eTK) + emit(asm.Opcodes.ATHROW) /* ------ (3.B) Cleanup-version of the finally-clause. * Reached upon early RETURN from (1) or upon early RETURN from one of the EHs in (2) @@ -348,12 +339,11 @@ trait BCodeSyncAndTry extends BCodeBodyBuilder { // this is not "postHandlers" either. // `shouldEmitCleanup` can be set, and at the same time this try expression may lack a finally-clause. // In other words, all combinations of (hasFinally, shouldEmitCleanup) are valid. - if (hasFinally && currentFinallyBlockNeedsCleanup) { - markProgramPoint(finCleanup) + if (hasFinally && currentFinallyBlockNeedsCleanup) + markProgramPoint(finCleanup) // regarding return value, the protocol is: in place of a `return-stmt`, a sequence of `adapt, store, jump` are inserted. - emitFinalizer(finalizer, null, isDuplicate = true) - pendingCleanups() - } + emitFinalizer(finalizer, null, isDuplicate = true) + pendingCleanups() /* ------ (4) finally-clause-for-normal-nonEarlyReturn-exit * Reached upon normal, non-early-return termination of (1) or of an EH in (2). 
@@ -362,65 +352,54 @@ trait BCodeSyncAndTry extends BCodeBodyBuilder { * ------ */ - markProgramPoint(postHandlers) - if (hasFinally) { - emitFinalizer(finalizer, tmp, isDuplicate = false) // the only invocation of emitFinalizer with `isDuplicate == false` - } + markProgramPoint(postHandlers) + if (hasFinally) + emitFinalizer(finalizer, tmp, isDuplicate = false) // the only invocation of emitFinalizer with `isDuplicate == false` - kind - } // end of genLoadTry() + kind + // end of genLoadTry() /* if no more pending cleanups, all that remains to do is return. Otherwise jump to the next (outer) pending cleanup. */ - private def pendingCleanups(): Unit = { - cleanups match { + private def pendingCleanups(): Unit = + cleanups match case Nil => - if (earlyReturnVar != null) { + if (earlyReturnVar != null) locals.load(earlyReturnVar) bc.emitRETURN(locals(earlyReturnVar).tk) - } else { + else bc emitRETURN UNIT - } shouldEmitCleanup = false case nextCleanup :: _ => bc goTo nextCleanup - } - } - def protect(start: asm.Label, end: asm.Label, handler: asm.Label, excType: ClassBType): Unit = { + def protect(start: asm.Label, end: asm.Label, handler: asm.Label, excType: ClassBType): Unit = val excInternalName: String = if (excType == null) null else excType.internalName assert(start != end, "protecting a range of zero instructions leads to illegal class format. 
Solution: add a NOP to that range.") mnode.visitTryCatchBlock(start, end, handler, excInternalName) - } /* `tmp` (if non-null) is the symbol of the local-var used to preserve the result of the try-body, see `guardResult` */ - def emitFinalizer(finalizer: Tree, tmp: Symbol, isDuplicate: Boolean): Unit = { + def emitFinalizer(finalizer: Tree, tmp: Symbol, isDuplicate: Boolean): Unit = var saved: immutable.Map[ /* Labeled */ Symbol, (BType, LoadDestination) ] = null - if (isDuplicate) { + if (isDuplicate) saved = jumpDest - } // when duplicating, the above guarantees new asm.Labels are used for LabelDefs contained in the finalizer (their vars are reused, that's ok) if (tmp != null) { locals.store(tmp) } genLoad(finalizer, UNIT) if (tmp != null) { locals.load(tmp) } - if (isDuplicate) { + if (isDuplicate) jumpDest = saved - } - } /* Does this tree have a try-catch block? */ - def mayCleanStack(tree: Tree): Boolean = tree.find { t => t match { // TODO: use existsSubTree + def mayCleanStack(tree: Tree): Boolean = tree.find { t => t match // TODO: use existsSubTree case Try(_, _, _) => true case _ => false - } }.isDefined trait EHClause case class NamelessEH(typeToDrop: ClassBType, caseBody: Tree) extends EHClause case class BoundEH (patSymbol: Symbol, caseBody: Tree) extends EHClause - } -} diff --git a/compiler/src/dotty/tools/backend/jvm/BTypes.scala b/compiler/src/dotty/tools/backend/jvm/BTypes.scala index 5539bf44aa17..eebe4b412c49 100644 --- a/compiler/src/dotty/tools/backend/jvm/BTypes.scala +++ b/compiler/src/dotty/tools/backend/jvm/BTypes.scala @@ -46,8 +46,8 @@ abstract class BTypes { self => * A BType is either a primitve type, a ClassBType, an ArrayBType of one of these, or a MethodType * referring to BTypes. 
*/ - /*sealed*/ trait BType { // Not sealed for now due to SI-8546 - final override def toString: String = this match { + /*sealed*/ trait BType: // Not sealed for now due to SI-8546 + final override def toString: String = this match case UNIT => "V" case BOOL => "Z" case CHAR => "C" @@ -60,7 +60,6 @@ abstract class BTypes { self => case ClassBType(internalName) => "L" + internalName + ";" case ArrayBType(component) => "[" + component case MethodBType(args, res) => args.mkString("(", "", ")" + res) - } /** * @return The Java descriptor of this type. Examples: @@ -74,11 +73,10 @@ abstract class BTypes { self => /** * @return 0 for void, 2 for long and double, 1 otherwise */ - final def size: Int = this match { + final def size: Int = this match case UNIT => 0 case LONG | DOUBLE => 2 case _ => 1 - } final def isPrimitive: Boolean = this.isInstanceOf[PrimitiveBType] final def isRef: Boolean = this.isInstanceOf[RefBType] @@ -106,39 +104,36 @@ abstract class BTypes { self => * promotions (e.g. BYTE to INT). Its operation can be visualized more easily in terms of the * Java bytecode type hierarchy. 
*/ - final def conformsTo(other: BType): Boolean = { + final def conformsTo(other: BType): Boolean = assert(isRef || isPrimitive, s"conformsTo cannot handle $this") assert(other.isRef || other.isPrimitive, s"conformsTo cannot handle $other") - this match { + this match case ArrayBType(component) => if (other == ObjectRef || other == jlCloneableRef || other == jiSerializableRef) true - else other match { + else other match case ArrayBType(otherComponoent) => component.conformsTo(otherComponoent) case _ => false - } case classType: ClassBType => - if (isBoxed) { + if (isBoxed) if (other.isBoxed) this == other else if (other == ObjectRef) true - else other match { + else other match case otherClassType: ClassBType => classType.isSubtypeOf(otherClassType) // e.g., java/lang/Double conforms to java/lang/Number case _ => false - } - } else if (isNullType) { + else if (isNullType) if (other.isNothingType) false else if (other.isPrimitive) false else true // Null conforms to all classes (except Nothing) and arrays. - } else if (isNothingType) { + else if (isNothingType) true - } else other match { + else other match case otherClassType: ClassBType => classType.isSubtypeOf(otherClassType) // case ArrayBType(_) => this.isNullType // documentation only, because `if (isNullType)` above covers this case case _ => // isNothingType || // documentation only, because `if (isNothingType)` above covers this case false - } case UNIT => other == UNIT @@ -147,14 +142,12 @@ abstract class BTypes { self => case _ => assert(isPrimitive && other.isPrimitive, s"Expected primitive types $this - $other") this == other - } - } /** * Compute the upper bound of two types. * Takes promotions of numeric primitives into account. 
*/ - final def maxType(other: BType): BType = this match { + final def maxType(other: BType): BType = this match case pt: PrimitiveBType => pt.maxValueType(other) case _: ArrayBType | _: ClassBType => @@ -165,13 +158,12 @@ abstract class BTypes { self => assert(other.isRef, s"Cannot compute maxType: $this, $other") // Approximate `lub`. The common type of two references is always ObjectReference. ObjectRef - } /** * See documentation of [[typedOpcode]]. * The numbers are taken from asm.Type.VOID_TYPE ff., the values are those shifted by << 8. */ - private def loadStoreOpcodeOffset: Int = this match { + private def loadStoreOpcodeOffset: Int = this match case UNIT | INT => 0 case BOOL | BYTE => 5 case CHAR => 6 @@ -180,20 +172,18 @@ abstract class BTypes { self => case LONG => 1 case DOUBLE => 3 case _ => 4 - } /** * See documentation of [[typedOpcode]]. * The numbers are taken from asm.Type.VOID_TYPE ff., the values are those shifted by << 16. */ - private def typedOpcodeOffset: Int = this match { + private def typedOpcodeOffset: Int = this match case UNIT => 5 case BOOL | CHAR | BYTE | SHORT | INT => 0 case FLOAT => 2 case LONG => 1 case DOUBLE => 3 case _ => 4 - } /** * Some JVM opcodes have typed variants. This method returns the correct opcode according to @@ -205,12 +195,11 @@ abstract class BTypes { self => * @return The opcode adapted to this java type. For example, if this type is `float` and * `opcode` is `IRETURN`, this method returns `FRETURN`. */ - final def typedOpcode(opcode: Int): Int = { + final def typedOpcode(opcode: Int): Int = if (opcode == asm.Opcodes.IALOAD || opcode == asm.Opcodes.IASTORE) opcode + loadStoreOpcodeOffset else opcode + typedOpcodeOffset - } /** * The asm.Type corresponding to this BType. 
@@ -223,7 +212,7 @@ abstract class BTypes { self => * - for an OBJECT type, the 'L' and ';' are not part of the range of the created Type * - for an ARRAY type, the full descriptor is part of the range */ - def toASMType: asm.Type = this match { + def toASMType: asm.Type = this match case UNIT => asm.Type.VOID_TYPE case BOOL => asm.Type.BOOLEAN_TYPE case CHAR => asm.Type.CHAR_TYPE @@ -236,15 +225,13 @@ abstract class BTypes { self => case ClassBType(internalName) => asm.Type.getObjectType(internalName) // see (*) above case a: ArrayBType => asm.Type.getObjectType(a.descriptor) case m: MethodBType => asm.Type.getMethodType(m.descriptor) - } def asRefBType : RefBType = this.asInstanceOf[RefBType] def asArrayBType : ArrayBType = this.asInstanceOf[ArrayBType] def asClassBType : ClassBType = this.asInstanceOf[ClassBType] def asPrimitiveBType : PrimitiveBType = this.asInstanceOf[PrimitiveBType] - } - sealed trait PrimitiveBType extends BType { + sealed trait PrimitiveBType extends BType: /** * The upper bound of two primitive types. The `other` type has to be either a primitive @@ -253,7 +240,7 @@ abstract class BTypes { self => * The maxValueType of (Char, Byte) and of (Char, Short) is Int, to encompass the negative * values of Byte and Short. See ticket #2087. 
*/ - final def maxValueType(other: BType): BType = { + final def maxValueType(other: BType): BType = def uncomparable: Nothing = throw new AssertionError(s"Cannot compute maxValueType: $this, $other") @@ -262,41 +249,37 @@ abstract class BTypes { self => if (other.isNothingType) return this if (this == other) return this - this match { + this match case BYTE => if (other == CHAR) INT else if (other.isNumericType) other else uncomparable case SHORT => - other match { + other match case BYTE => SHORT case CHAR => INT case INT | LONG | FLOAT | DOUBLE => other case _ => uncomparable - } case CHAR => - other match { + other match case BYTE | SHORT => INT case INT | LONG | FLOAT | DOUBLE => other case _ => uncomparable - } case INT => - other match { + other match case BYTE | SHORT | CHAR => INT case LONG | FLOAT | DOUBLE => other case _ => uncomparable - } case LONG => - other match { + other match case INT | BYTE | LONG | CHAR | SHORT => LONG case DOUBLE => DOUBLE case FLOAT => FLOAT case _ => uncomparable - } case FLOAT => if (other == DOUBLE) DOUBLE @@ -308,9 +291,6 @@ abstract class BTypes { self => else uncomparable case UNIT | BOOL => uncomparable - } - } - } case object UNIT extends PrimitiveBType case object BOOL extends PrimitiveBType @@ -322,7 +302,7 @@ abstract class BTypes { self => case object LONG extends PrimitiveBType case object DOUBLE extends PrimitiveBType - sealed trait RefBType extends BType { + sealed trait RefBType extends BType: /** * The class or array type of this reference type. Used for ANEWARRAY, MULTIANEWARRAY, * INSTANCEOF and CHECKCAST instructions. Also used for emitting invokevirtual calls to @@ -334,11 +314,9 @@ abstract class BTypes { self => * * This can be verified for example using javap or ASMifier. 
*/ - def classOrArrayType: String = this match { + def classOrArrayType: String = this match case ClassBType(internalName) => internalName case a: ArrayBType => a.descriptor - } - } /** * InnerClass and EnclosingMethod attributes (EnclosingMethod is displayed as OUTERCLASS in asm). @@ -580,7 +558,7 @@ abstract class BTypes { self => * ClassBType is not a case class because we want a custom equals method, and because the * extractor extracts the internalName, which is what you typically need. */ - final class ClassBType(val internalName: String) extends RefBType { + final class ClassBType(val internalName: String) extends RefBType: /** * Write-once variable allows initializing a cyclic graph of infos. This is required for * nested classes. Example: for the definition `class A { class B }` we have @@ -590,20 +568,18 @@ abstract class BTypes { self => */ private var _info: ClassInfo = null - def info: ClassInfo = { + def info: ClassInfo = assert(_info != null, s"ClassBType.info not yet assigned: $this") _info - } - def info_=(i: ClassInfo): Unit = { + def info_=(i: ClassInfo): Unit = assert(_info == null, s"Cannot set ClassBType.info multiple times: $this") _info = i checkInfoConsistency() - } classBTypeFromInternalNameMap(internalName) = this - private def checkInfoConsistency(): Unit = { + private def checkInfoConsistency(): Unit = // we assert some properties. however, some of the linked ClassBType (members, superClass, // interfaces) may not yet have an `_info` (initialization of cyclic structures). so we do a // best-effort verification. @@ -625,7 +601,6 @@ abstract class BTypes { self => ) assert(info.memberClasses.forall(c => ifInit(c)(_.isNestedClass)), info.memberClasses) - } /** * The internal name of a class is the string returned by java.lang.Class.getName, with all '.' 
@@ -640,10 +615,9 @@ abstract class BTypes { self => def isInterface = (info.flags & asm.Opcodes.ACC_INTERFACE) != 0 - def superClassesTransitive: List[ClassBType] = info.superClass match { + def superClassesTransitive: List[ClassBType] = info.superClass match case None => Nil case Some(sc) => sc :: sc.superClassesTransitive - } def isNestedClass = info.nestedInfo.isDefined @@ -651,7 +625,7 @@ abstract class BTypes { self => if (isNestedClass) this :: info.nestedInfo.get.enclosingClass.enclosingNestedClassesChain else Nil - def innerClassAttributeEntry: Option[InnerClassEntry] = info.nestedInfo map { + def innerClassAttributeEntry: Option[InnerClassEntry] = info.nestedInfo map: case NestedInfo(_, outerName, innerName, isStaticNestedClass) => import GenBCodeOps.addFlagIf InnerClassEntry( @@ -661,25 +635,22 @@ abstract class BTypes { self => info.flags.addFlagIf(isStaticNestedClass, asm.Opcodes.ACC_STATIC) & ClassBType.INNER_CLASSES_FLAGS ) - } - def isSubtypeOf(other: ClassBType): Boolean = { + def isSubtypeOf(other: ClassBType): Boolean = if (this == other) return true - if (isInterface) { + if (isInterface) if (other == ObjectRef) return true // interfaces conform to Object if (!other.isInterface) return false // this is an interface, the other is some class other than object. interfaces cannot extend classes, so the result is false. // else: this and other are both interfaces. continue to (*) - } else { + else val sc = info.superClass if (sc.isDefined && sc.get.isSubtypeOf(other)) return true // the superclass of this class conforms to other if (!other.isInterface) return false // this and other are both classes, and the superclass of this does not conform // else: this is a class, the other is an interface. continue to (*) - } // (*) check if some interface of this class conforms to other. 
info.interfaces.exists(_.isSubtypeOf(other)) - } /** * Finding the least upper bound in agreement with the bytecode verifier @@ -688,11 +659,11 @@ abstract class BTypes { self => * http://comments.gmane.org/gmane.comp.java.vm.languages/2293 * https://issues.scala-lang.org/browse/SI-3872 */ - def jvmWiseLUB(other: ClassBType): ClassBType = { + def jvmWiseLUB(other: ClassBType): ClassBType = def isNotNullOrNothing(c: ClassBType) = !c.isNullType && !c.isNothingType assert(isNotNullOrNothing(this) && isNotNullOrNothing(other), s"jvmWiseLub for null or nothing: $this - $other") - val res: ClassBType = (this.isInterface, other.isInterface) match { + val res: ClassBType = (this.isInterface, other.isInterface) match case (true, true) => // exercised by test/files/run/t4761.scala if (other.isSubtypeOf(this)) this @@ -712,27 +683,23 @@ abstract class BTypes { self => // MOST LIKELY the answer can be found here, see the comments and links by Miguel: // - https://issues.scala-lang.org/browse/SI-3872 firstCommonSuffix(this :: this.superClassesTransitive, other :: other.superClassesTransitive) - } assert(isNotNullOrNothing(res), s"jvmWiseLub computed: $res") res - } - private def firstCommonSuffix(as: List[ClassBType], bs: List[ClassBType]): ClassBType = { + private def firstCommonSuffix(as: List[ClassBType], bs: List[ClassBType]): ClassBType = var chainA = as var chainB = bs var fcs: ClassBType = null - while { + while if (chainB contains chainA.head) fcs = chainA.head else if (chainA contains chainB.head) fcs = chainB.head - else { + else chainA = chainA.tail chainB = chainB.tail - } fcs == null - } do () + do () fcs - } /** * Custom equals / hashCode: we only compare the name (offset / length) @@ -742,15 +709,13 @@ abstract class BTypes { self => case _ => false }) - override def hashCode: Int = { + override def hashCode: Int = import scala.runtime.Statics var acc: Int = -889275714 acc = Statics.mix(acc, internalName.hashCode) Statics.finalizeHash(acc, 2) - } - } - object 
ClassBType { + object ClassBType: /** * Pattern matching on a ClassBType extracts the `internalName` of the class. */ @@ -760,12 +725,11 @@ abstract class BTypes { self => * Valid flags for InnerClass attribute entry. * See http://docs.oracle.com/javase/specs/jvms/se8/html/jvms-4.html#jvms-4.7.6 */ - private val INNER_CLASSES_FLAGS = { + private val INNER_CLASSES_FLAGS = asm.Opcodes.ACC_PUBLIC | asm.Opcodes.ACC_PRIVATE | asm.Opcodes.ACC_PROTECTED | asm.Opcodes.ACC_STATIC | asm.Opcodes.ACC_FINAL | asm.Opcodes.ACC_INTERFACE | asm.Opcodes.ACC_ABSTRACT | asm.Opcodes.ACC_SYNTHETIC | asm.Opcodes.ACC_ANNOTATION | asm.Opcodes.ACC_ENUM - } // Primitive classes have no super class. A ClassBType for those is only created when // they are actually being compiled (e.g., when compiling scala/Boolean.scala). @@ -785,7 +749,6 @@ abstract class BTypes { self => "scala/Null", "scala/Nothing" ) - } /** * The type info for a class. Used for symboltable-independent subtype checks in the backend. @@ -835,17 +798,14 @@ abstract class BTypes { self => */ case class InnerClassEntry(name: String, outerName: String, innerName: String, flags: Int) - case class ArrayBType(componentType: BType) extends RefBType { - def dimension: Int = componentType match { + case class ArrayBType(componentType: BType) extends RefBType: + def dimension: Int = componentType match case a: ArrayBType => 1 + a.dimension case _ => 1 - } - def elementType: BType = componentType match { + def elementType: BType = componentType match case a: ArrayBType => a.elementType case t => t - } - } case class MethodBType(argumentTypes: List[BType], returnType: BType) extends BType @@ -862,11 +822,10 @@ abstract class BTypes { self => /*final*/ case class MethodNameAndType(name: String, methodType: MethodBType) } -object BTypes { +object BTypes: /** * A marker for strings that represent class internal names. * Ideally the type would be incompatible with String, for example by making it a value class. 
* But that would create overhead in a Collection[InternalName]. */ type InternalName = String -} diff --git a/compiler/src/dotty/tools/backend/jvm/BTypesFromSymbols.scala b/compiler/src/dotty/tools/backend/jvm/BTypesFromSymbols.scala index 884dd19ee64f..25c06cd21d62 100644 --- a/compiler/src/dotty/tools/backend/jvm/BTypesFromSymbols.scala +++ b/compiler/src/dotty/tools/backend/jvm/BTypesFromSymbols.scala @@ -21,7 +21,7 @@ import dotty.tools.dotc.core.Phases * information from a symbol and its type to create the corresponding ClassBType. It requires * access to the compiler (global parameter). */ -class BTypesFromSymbols[I <: DottyBackendInterface](val int: I, val frontendAccess: PostProcessorFrontendAccess) extends BTypes { +class BTypesFromSymbols[I <: DottyBackendInterface](val int: I, val frontendAccess: PostProcessorFrontendAccess) extends BTypes: import int.{_, given} import DottyBackendInterface.{symExtensions, _} @@ -31,9 +31,8 @@ class BTypesFromSymbols[I <: DottyBackendInterface](val int: I, val frontendAcce val bCodeAsmCommon: BCodeAsmCommon[int.type ] = new BCodeAsmCommon(int) import bCodeAsmCommon._ - val coreBTypes = new CoreBTypesFromSymbols[I]{ + val coreBTypes = new CoreBTypesFromSymbols[I]: val bTypes: BTypesFromSymbols.this.type = BTypesFromSymbols.this - } import coreBTypes._ @threadUnsafe protected lazy val classBTypeFromInternalNameMap = @@ -47,7 +46,7 @@ class BTypesFromSymbols[I <: DottyBackendInterface](val int: I, val frontendAcce /** * The ClassBType for a class symbol `sym`. 
*/ - final def classBTypeFromSymbol(classSym: Symbol): ClassBType = { + final def classBTypeFromSymbol(classSym: Symbol): ClassBType = assert(classSym != NoSymbol, "Cannot create ClassBType from NoSymbol") assert(classSym.isClass, s"Cannot create ClassBType from non-class symbol $classSym") assert( @@ -63,9 +62,8 @@ class BTypesFromSymbols[I <: DottyBackendInterface](val int: I, val frontendAcce convertedClasses(classSym) = classBType setClassInfo(classSym, classBType) }) - } - final def mirrorClassBTypeFromSymbol(moduleClassSym: Symbol): ClassBType = { + final def mirrorClassBTypeFromSymbol(moduleClassSym: Symbol): ClassBType = assert(moduleClassSym.isTopLevelModuleClass, s"not a top-level module class: $moduleClassSym") val internalName = moduleClassSym.javaBinaryName.stripSuffix(StdNames.str.MODULE_SUFFIX) val bType = ClassBType(internalName) @@ -77,20 +75,17 @@ class BTypesFromSymbols[I <: DottyBackendInterface](val int: I, val frontendAcce nestedInfo = None ) bType - } - private def setClassInfo(classSym: Symbol, classBType: ClassBType): ClassBType = { - val superClassSym: Symbol = { + private def setClassInfo(classSym: Symbol, classBType: ClassBType): ClassBType = + val superClassSym: Symbol = val t = classSym.asClass.superClass if (t.exists) t - else if (classSym.is(ModuleClass)) { + else if (classSym.is(ModuleClass)) // workaround #371 println(s"Warning: mocking up superclass for $classSym") defn.ObjectClass - } else t - } assert( if (classSym == defn.ObjectClass) superClassSym == NoSymbol @@ -99,7 +94,7 @@ class BTypesFromSymbols[I <: DottyBackendInterface](val int: I, val frontendAcce else // A ClassBType for a primitive class (scala.Boolean et al) is only created when compiling these classes. 
((superClassSym != NoSymbol) && !superClassSym.isInterface) || (isCompilingPrimitive && primitiveTypeMap.contains(classSym)), - s"Bad superClass for $classSym: $superClassSym" + s"Bad superClass for $classSym: $superClassSym" ) val superClass = if (superClassSym == NoSymbol) None else Some(classBTypeFromSymbol(superClassSym)) @@ -108,7 +103,7 @@ class BTypesFromSymbols[I <: DottyBackendInterface](val int: I, val frontendAcce * All interfaces implemented by a class, except for those inherited through the superclass. * Redundant interfaces are removed unless there is a super call to them. */ - extension (sym: Symbol) def superInterfaces: List[Symbol] = { + extension (sym: Symbol) def superInterfaces: List[Symbol] = val directlyInheritedTraits = sym.directlyInheritedTraits val directlyInheritedTraitsSet = directlyInheritedTraits.toSet val allBaseClasses = directlyInheritedTraits.iterator.flatMap(_.asClass.baseClasses.drop(1)).toSet @@ -117,7 +112,6 @@ class BTypesFromSymbols[I <: DottyBackendInterface](val int: I, val frontendAcce // if (additional.nonEmpty) // println(s"$fullName: adding supertraits $additional") directlyInheritedTraits.filter(t => !allBaseClasses(t) || superCalls(t)) ++ additional - } val interfaces = classSym.superInterfaces.map(classBTypeFromSymbol) @@ -127,7 +121,7 @@ class BTypesFromSymbols[I <: DottyBackendInterface](val int: I, val frontendAcce * declared but not otherwise referenced in C (from the bytecode or a method / field signature). * We collect them here. */ - val nestedClassSymbols = { + val nestedClassSymbols = // The lambdalift phase lifts all nested classes to the enclosing class, so if we collect // member classes right after lambdalift, we obtain all nested classes, including local and // anonymous ones. @@ -146,13 +140,11 @@ class BTypesFromSymbols[I <: DottyBackendInterface](val int: I, val frontendAcce // For consistency, the InnerClass entry for D needs to be present in C - to Java it looks // like D is a member of C, not C$. 
val linkedClass = classSym.linkedClass - val companionModuleMembers = { + val companionModuleMembers = if (classSym.linkedClass.isTopLevelModuleClass) getMemberClasses(classSym.linkedClass) else Nil - } nestedClasses ++ companionModuleMembers - } /** * For nested java classes, the scala compiler creates both a class and a module (and therefore @@ -161,14 +153,14 @@ class BTypesFromSymbols[I <: DottyBackendInterface](val int: I, val frontendAcce * Here we get rid of the module class B, making sure that the class B is present. */ val nestedClassSymbolsNoJavaModuleClasses = nestedClassSymbols.filter(s => { - if (s.is(JavaDefined) && s.is(ModuleClass)) { + if (s.is(JavaDefined) && s.is(ModuleClass)) // We could also search in nestedClassSymbols for s.linkedClassOfClass, but sometimes that // returns NoSymbol, so it doesn't work. val nb = nestedClassSymbols.count(mc => mc.name == s.name && mc.owner == s.owner) // this assertion is specific to how ScalaC works. It doesn't apply to dotty, as n dotty there will be B & B$ // assert(nb == 2, s"Java member module without member class: $s - $nestedClassSymbols") false - } else true + else true }) val memberClasses = nestedClassSymbolsNoJavaModuleClasses.map(classBTypeFromSymbol) @@ -177,7 +169,6 @@ class BTypesFromSymbols[I <: DottyBackendInterface](val int: I, val frontendAcce classBType.info = ClassInfo(superClass, interfaces, flags, memberClasses, nestedInfo) classBType - } /** For currently compiled classes: All locally defined classes including local classes. * The empty list for classes that are not currently compiled. 
@@ -191,36 +182,33 @@ class BTypesFromSymbols[I <: DottyBackendInterface](val int: I, val frontendAcce private def definedClasses(sym: Symbol, phase: Phase) = if (sym.isDefinedInCurrentRun) - atPhase(phase) { + atPhase(phase): toDenot(sym).info.decls.filter(sym => sym.isClass && !sym.isEffectivelyErased) - } else Nil - private def buildNestedInfo(innerClassSym: Symbol): Option[NestedInfo] = { + private def buildNestedInfo(innerClassSym: Symbol): Option[NestedInfo] = assert(innerClassSym.isClass, s"Cannot build NestedInfo for non-class symbol $innerClassSym") val isNested = !innerClassSym.originalOwner.originalLexicallyEnclosingClass.is(PackageClass) if (!isNested) None - else { + else // See comment in BTypes, when is a class marked static in the InnerClass table. val isStaticNestedClass = innerClassSym.originalOwner.originalLexicallyEnclosingClass.isOriginallyStaticOwner // After lambdalift (which is where we are), the rawowoner field contains the enclosing class. - val enclosingClassSym = { - if (innerClassSym.isClass) { - atPhase(flattenPhase.prev) { + val enclosingClassSym = + if (innerClassSym.isClass) + atPhase(flattenPhase.prev): toDenot(innerClassSym).owner.enclosingClass - } - } else atPhase(flattenPhase.prev)(innerClassSym.enclosingClass) - } //todo is handled specially for JavaDefined symbols in scalac + //todo is handled specially for JavaDefined symbols in scalac val enclosingClass: ClassBType = classBTypeFromSymbol(enclosingClassSym) - val outerName: Option[String] = { - if (isAnonymousOrLocalClass(innerClassSym)) { + val outerName: Option[String] = + if (isAnonymousOrLocalClass(innerClassSym)) None - } else { + else val outerName = innerClassSym.originalOwner.originalLexicallyEnclosingClass.javaBinaryName def dropModule(str: String): String = if (!str.isEmpty && str.last == '$') str.take(str.length - 1) else str @@ -229,20 +217,14 @@ class BTypesFromSymbols[I <: DottyBackendInterface](val int: I, val frontendAcce if 
(innerClassSym.originalOwner.originalLexicallyEnclosingClass.isTopLevelModuleClass) dropModule(outerName) else outerName Some(outerNameModule.toString) - } - } - val innerName: Option[String] = { + val innerName: Option[String] = if (innerClassSym.isAnonymousClass || innerClassSym.isAnonymousFunction) None - else { + else val original = innerClassSym.initial Some(atPhase(original.validFor.phaseId)(innerClassSym.name).mangledString) // moduleSuffix for module classes - } - } Some(NestedInfo(enclosingClass, outerName, innerName, isStaticNestedClass)) - } - } /** * This is basically a re-implementation of sym.isStaticOwner, but using the originalOwner chain. @@ -275,7 +257,7 @@ class BTypesFromSymbols[I <: DottyBackendInterface](val int: I, val frontendAcce * (*) protected cannot be used, since inner classes 'see' protected members, * and they would fail verification after lifted. */ - final def javaFlags(sym: Symbol): Int = { + final def javaFlags(sym: Symbol): Int = // Classes are always emitted as public. 
This matches the behavior of Scala 2 // and is necessary for object deserialization to work properly, otherwise @@ -308,14 +290,11 @@ class BTypesFromSymbols[I <: DottyBackendInterface](val int: I, val frontendAcce .addFlagIf(sym.is(Synchronized), ACC_SYNCHRONIZED) .addFlagIf(sym.isDeprecated, ACC_DEPRECATED) .addFlagIf(sym.is(Enum), ACC_ENUM) - } - def javaFieldFlags(sym: Symbol) = { + def javaFieldFlags(sym: Symbol) = import asm.Opcodes._ import GenBCodeOps.addFlagIf javaFlags(sym) .addFlagIf(sym.hasAnnotation(TransientAttr), ACC_TRANSIENT) .addFlagIf(sym.hasAnnotation(VolatileAttr), ACC_VOLATILE) .addFlagIf(!sym.is(Mutable), ACC_FINAL) - } -} diff --git a/compiler/src/dotty/tools/backend/jvm/BackendUtils.scala b/compiler/src/dotty/tools/backend/jvm/BackendUtils.scala index 2eaaccdd441d..0b0b0dc8e5ae 100644 --- a/compiler/src/dotty/tools/backend/jvm/BackendUtils.scala +++ b/compiler/src/dotty/tools/backend/jvm/BackendUtils.scala @@ -14,14 +14,14 @@ import scala.language.unsafeNulls * This component hosts tools and utilities used in the backend that require access to a `BTypes` * instance. 
*/ -class BackendUtils(val postProcessor: PostProcessor) { +class BackendUtils(val postProcessor: PostProcessor): import postProcessor.{bTypes, frontendAccess} import frontendAccess.{compilerSettings} import bTypes.* import coreBTypes.jliLambdaMetaFactoryAltMetafactoryHandle // Keep synchronized with `minTargetVersion` and `maxTargetVersion` in ScalaSettings - lazy val classfileVersion: Int = compilerSettings.target match { + lazy val classfileVersion: Int = compilerSettings.target match case "8" => asm.Opcodes.V1_8 case "9" => asm.Opcodes.V9 case "10" => asm.Opcodes.V10 @@ -36,23 +36,21 @@ class BackendUtils(val postProcessor: PostProcessor) { case "19" => asm.Opcodes.V19 case "20" => asm.Opcodes.V20 case "21" => asm.Opcodes.V21 - } - lazy val extraProc: Int = { + lazy val extraProc: Int = import GenBCodeOps.addFlagIf val majorVersion: Int = (classfileVersion & 0xFF) val emitStackMapFrame = (majorVersion >= 50) asm.ClassWriter.COMPUTE_MAXS .addFlagIf(emitStackMapFrame, asm.ClassWriter.COMPUTE_FRAMES) - } - def collectSerializableLambdas(classNode: ClassNode): Array[Handle] = { + def collectSerializableLambdas(classNode: ClassNode): Array[Handle] = val indyLambdaBodyMethods = new mutable.ArrayBuffer[Handle] - for (m <- classNode.methods.asScala) { + for (m <- classNode.methods.asScala) val iter = m.instructions.iterator - while (iter.hasNext) { + while (iter.hasNext) val insn = iter.next() - insn match { + insn match case indy: InvokeDynamicInsnNode if indy.bsm == jliLambdaMetaFactoryAltMetafactoryHandle => import java.lang.invoke.LambdaMetafactory.FLAG_SERIALIZABLE @@ -62,11 +60,7 @@ class BackendUtils(val postProcessor: PostProcessor) { val implMethod = indy.bsmArgs(1).asInstanceOf[Handle] indyLambdaBodyMethods += implMethod case _ => - } - } - } indyLambdaBodyMethods.toArray - } /* * Add: @@ -90,7 +84,7 @@ class BackendUtils(val postProcessor: PostProcessor) { * method due to a limitation in the JVM. 
Instead, we emit a separate invokedynamic bytecode for each group of target * methods. */ - def addLambdaDeserialize(classNode: ClassNode, implMethodsArray: Array[Handle]): Unit = { + def addLambdaDeserialize(classNode: ClassNode, implMethodsArray: Array[Handle]): Unit = import asm.Opcodes._ import bTypes._ import coreBTypes._ @@ -106,10 +100,9 @@ class BackendUtils(val postProcessor: PostProcessor) { val serlamObjDesc = MethodBType(jliSerializedLambdaRef :: Nil, ObjectRef).descriptor val mv = cw.visitMethod(ACC_PRIVATE + ACC_STATIC + ACC_SYNTHETIC, "$deserializeLambda$", serlamObjDesc, null, null) - def emitLambdaDeserializeIndy(targetMethods: Seq[Handle]): Unit = { + def emitLambdaDeserializeIndy(targetMethods: Seq[Handle]): Unit = mv.visitVarInsn(ALOAD, 0) mv.visitInvokeDynamicInsn("lambdaDeserialize", serlamObjDesc, jliLambdaDeserializeBootstrapHandle, targetMethods: _*) - } val targetMethodGroupLimit = 255 - 1 - 3 // JVM limit. See See MAX_MH_ARITY in CallSite.java val groups: Array[Array[Handle]] = implMethodsArray.grouped(targetMethodGroupLimit).toArray @@ -120,40 +113,34 @@ class BackendUtils(val postProcessor: PostProcessor) { val terminalLabel = new Label def nextLabel(i: Int) = if (i == numGroups - 2) terminalLabel else initialLabels(i + 1) - for ((label, i) <- initialLabels.iterator.zipWithIndex) { + for ((label, i) <- initialLabels.iterator.zipWithIndex) mv.visitTryCatchBlock(label, nextLabel(i), nextLabel(i), jlIllegalArgExceptionRef.internalName) - } - for ((label, i) <- initialLabels.iterator.zipWithIndex) { + for ((label, i) <- initialLabels.iterator.zipWithIndex) mv.visitLabel(label) emitLambdaDeserializeIndy(groups(i).toIndexedSeq) mv.visitInsn(ARETURN) - } mv.visitLabel(terminalLabel) emitLambdaDeserializeIndy(groups(numGroups - 1).toIndexedSeq) mv.visitInsn(ARETURN) - } /** * Visit the class node and collect all referenced nested classes. 
*/ - def collectNestedClasses(classNode: ClassNode): (List[ClassBType], List[ClassBType]) = { + def collectNestedClasses(classNode: ClassNode): (List[ClassBType], List[ClassBType]) = // type InternalName = String - val c = new NestedClassesCollector[ClassBType](nestedOnly = true) { + val c = new NestedClassesCollector[ClassBType](nestedOnly = true): def declaredNestedClasses(internalName: InternalName): List[ClassBType] = bTypes.classBTypeFromInternalName(internalName).info.memberClasses - def getClassIfNested(internalName: InternalName): Option[ClassBType] = { + def getClassIfNested(internalName: InternalName): Option[ClassBType] = val c = bTypes.classBTypeFromInternalName(internalName) Option.when(c.isNestedClass)(c) - } def raiseError(msg: String, sig: String, e: Option[Throwable]): Unit = { // don't crash on invalid generic signatures } - } c.visit(classNode) (c.declaredInnerClasses.toList, c.referredInnerClasses.toList) - } /* * Populates the InnerClasses JVM attribute with `refedInnerClasses`. See also the doc on inner @@ -167,16 +154,13 @@ class BackendUtils(val postProcessor: PostProcessor) { * * can-multi-thread */ - final def addInnerClasses(jclass: asm.ClassVisitor, declaredInnerClasses: List[ClassBType], refedInnerClasses: List[ClassBType]): Unit = { + final def addInnerClasses(jclass: asm.ClassVisitor, declaredInnerClasses: List[ClassBType], refedInnerClasses: List[ClassBType]): Unit = // sorting ensures nested classes are listed after their enclosing class thus satisfying the Eclipse Java compiler val allNestedClasses = new mutable.TreeSet[ClassBType]()(Ordering.by(_.internalName)) allNestedClasses ++= declaredInnerClasses refedInnerClasses.foreach(allNestedClasses ++= _.enclosingNestedClassesChain) for nestedClass <- allNestedClasses - do { + do // Extract the innerClassEntry - we know it exists, enclosingNestedClassesChain only returns nested classes. 
val Some(e) = nestedClass.innerClassAttributeEntry: @unchecked jclass.visitInnerClass(e.name, e.outerName, e.innerName, e.flags) - } - } -} diff --git a/compiler/src/dotty/tools/backend/jvm/ClassfileWriter.scala b/compiler/src/dotty/tools/backend/jvm/ClassfileWriter.scala index 08e84de92dca..bf40482d01ba 100644 --- a/compiler/src/dotty/tools/backend/jvm/ClassfileWriter.scala +++ b/compiler/src/dotty/tools/backend/jvm/ClassfileWriter.scala @@ -14,19 +14,19 @@ import java.nio.channels.ClosedByInterruptException import BTypes.InternalName import scala.language.unsafeNulls -class ClassfileWriter(frontendAccess: PostProcessorFrontendAccess) { +class ClassfileWriter(frontendAccess: PostProcessorFrontendAccess): import frontendAccess.{backendReporting, compilerSettings} // if non-null, classfiles are additionally written to this directory private val dumpOutputDir: AbstractFile = getDirectoryOrNull(compilerSettings.dumpClassesDirectory) // if non-null, classfiles are written to a jar instead of the output directory - private val jarWriter: JarWriter | Null = compilerSettings.outputDirectory match { + private val jarWriter: JarWriter | Null = compilerSettings.outputDirectory match case jar: JarArchive => - val mainClass = compilerSettings.mainClass.orElse { + val mainClass = compilerSettings.mainClass.orElse: // If no main class was specified, see if there's only one // entry point among the classes going into the jar. 
- frontendAccess.getEntryPoints match { + frontendAccess.getEntryPoints match case name :: Nil => backendReporting.log(i"Unique entry point: setting Main-Class to $name") Some(name) @@ -34,8 +34,6 @@ class ClassfileWriter(frontendAccess: PostProcessorFrontendAccess) { if names.isEmpty then backendReporting.warning(em"No Main-Class designated or discovered.") else backendReporting.warning(em"No Main-Class due to multiple entry points:\n ${names.mkString("\n ")}") None - } - } jar.underlyingSource.map{ source => if jar.isEmpty then val jarMainAttrs = mainClass.map(Name.MAIN_CLASS -> _).toList @@ -48,15 +46,14 @@ class ClassfileWriter(frontendAccess: PostProcessorFrontendAccess) { }.orNull case _ => null - } private def getDirectoryOrNull(dir: Option[String]): AbstractFile = dir.map(d => new PlainDirectory(Directory(d))).orNull - private def getFile(base: AbstractFile, clsName: String, suffix: String): AbstractFile = { - if (base.file != null) { + private def getFile(base: AbstractFile, clsName: String, suffix: String): AbstractFile = + if (base.file != null) fastGetFile(base, clsName, suffix) - } else { + else def ensureDirectory(dir: AbstractFile): AbstractFile = if (dir.isDirectory) dir else throw new FileConflictException(s"${base.path}/$clsName$suffix: ${dir.path} is not a directory", dir) @@ -64,45 +61,38 @@ class ClassfileWriter(frontendAccess: PostProcessorFrontendAccess) { val pathParts = clsName.split("[./]").toList for (part <- pathParts.init) dir = ensureDirectory(dir) subdirectoryNamed part ensureDirectory(dir) fileNamed pathParts.last + suffix - } - } - private def fastGetFile(base: AbstractFile, clsName: String, suffix: String) = { + private def fastGetFile(base: AbstractFile, clsName: String, suffix: String) = val index = clsName.lastIndexOf('/') - val (packageName, simpleName) = if (index > 0) { + val (packageName, simpleName) = if (index > 0) (clsName.substring(0, index), clsName.substring(index + 1)) - } else ("", clsName) + else ("", clsName) val 
directory = base.file.toPath.resolve(packageName) new PlainFile(Path(directory.resolve(simpleName + suffix))) - } - private def writeBytes(outFile: AbstractFile, bytes: Array[Byte]): Unit = { - if (outFile.file != null) { + private def writeBytes(outFile: AbstractFile, bytes: Array[Byte]): Unit = + if (outFile.file != null) val outPath = outFile.file.toPath try Files.write(outPath, bytes) - catch { + catch case _: java.nio.file.NoSuchFileException => Files.createDirectories(outPath.getParent) Files.write(outPath, bytes) - } - } else { + else val out = new DataOutputStream(outFile.bufferedOutput) try out.write(bytes, 0, bytes.length) finally out.close() - } - } - def writeClass(className: InternalName, bytes: Array[Byte], sourceFile: AbstractFile): AbstractFile | Null = try { + def writeClass(className: InternalName, bytes: Array[Byte], sourceFile: AbstractFile): AbstractFile | Null = try // val writeStart = Statistics.startTimer(BackendStats.bcodeWriteTimer) val outFile = writeToJarOrFile(className, bytes, ".class") // Statistics.stopTimer(BackendStats.bcodeWriteTimer, writeStart) - if (dumpOutputDir != null) { + if (dumpOutputDir != null) val dumpFile = getFile(dumpOutputDir, className, ".class") writeBytes(dumpFile, bytes) - } outFile - } catch { + catch case e: FileConflictException => backendReporting.error(em"error writing $className: ${e.getMessage}") null @@ -110,12 +100,11 @@ class ClassfileWriter(frontendAccess: PostProcessorFrontendAccess) { if compilerSettings.debug then e.printStackTrace() backendReporting.error(em"error writing $className: ${e.getClass.getName} ${e.getMessage}") null - } def writeTasty(className: InternalName, bytes: Array[Byte]): Unit = writeToJarOrFile(className, bytes, ".tasty") - private def writeToJarOrFile(className: InternalName, bytes: Array[Byte], suffix: String): AbstractFile | Null = { + private def writeToJarOrFile(className: InternalName, bytes: Array[Byte], suffix: String): AbstractFile | Null = if jarWriter == null then 
val outFolder = compilerSettings.outputDirectory val outFile = getFile(outFolder, className, suffix) @@ -131,12 +120,9 @@ class ClassfileWriter(frontendAccess: PostProcessorFrontendAccess) { try out.write(bytes, 0, bytes.length) finally out.flush() null - } - def close(): Unit = { + def close(): Unit = if (jarWriter != null) jarWriter.close() - } -} /** Can't output a file due to the state of the file system. */ class FileConflictException(msg: String, val file: AbstractFile) extends IOException(msg) diff --git a/compiler/src/dotty/tools/backend/jvm/CodeGen.scala b/compiler/src/dotty/tools/backend/jvm/CodeGen.scala index c9f9e4e23d90..86aa9a19a5f1 100644 --- a/compiler/src/dotty/tools/backend/jvm/CodeGen.scala +++ b/compiler/src/dotty/tools/backend/jvm/CodeGen.scala @@ -40,7 +40,7 @@ class CodeGen(val int: DottyBackendInterface, val primitives: DottyPrimitives)( private lazy val mirrorCodeGen = Impl.JMirrorBuilder() - def genUnit(unit: CompilationUnit): GeneratedDefs = { + def genUnit(unit: CompilationUnit): GeneratedDefs = val generatedClasses = mutable.ListBuffer.empty[GeneratedClass] val generatedTasty = mutable.ListBuffer.empty[GeneratedTasty] @@ -75,7 +75,7 @@ class CodeGen(val int: DottyBackendInterface, val primitives: DottyPrimitives)( def genTastyAndSetAttributes(claszSymbol: Symbol, store: ClassNode): Unit = import Impl.createJAttribute - for (binary <- unit.pickled.get(claszSymbol.asClass)) { + for (binary <- unit.pickled.get(claszSymbol.asClass)) generatedTasty += GeneratedTasty(store, binary) val tasty = val uuid = new TastyHeaderUnpickler(binary()).readHeader() @@ -91,66 +91,57 @@ class CodeGen(val int: DottyBackendInterface, val primitives: DottyPrimitives)( val dataAttr = createJAttribute(nme.TASTYATTR.mangledString, tasty, 0, tasty.length) store.visitAttribute(dataAttr) - } def genClassDefs(tree: Tree): Unit = - tree match { + tree match case EmptyTree => () case PackageDef(_, stats) => stats foreach genClassDefs case ValDef(_, _, _) => () // 
module val not emitted case td: TypeDef => genClassDef(td) - } genClassDefs(unit.tpdTree) GeneratedDefs(generatedClasses.toList, generatedTasty.toList) - } // Creates a callback that will be evaluated in PostProcessor after creating a file - private def onFileCreated(cls: ClassNode, claszSymbol: Symbol, sourceFile: interfaces.SourceFile): AbstractFile => Unit = clsFile => { - val (fullClassName, isLocal) = atPhase(sbtExtractDependenciesPhase) { + private def onFileCreated(cls: ClassNode, claszSymbol: Symbol, sourceFile: interfaces.SourceFile): AbstractFile => Unit = clsFile => + val (fullClassName, isLocal) = atPhase(sbtExtractDependenciesPhase): (ExtractDependencies.classNameAsString(claszSymbol), claszSymbol.isLocal) - } val className = cls.name.replace('/', '.') if (ctx.compilerCallback != null) ctx.compilerCallback.onClassGenerated(sourceFile, convertAbstractFile(clsFile), className) - if (ctx.sbtCallback != null) { + if (ctx.sbtCallback != null) val jSourceFile = sourceFile.jfile.orElse(null) val cb = ctx.sbtCallback if (isLocal) cb.generatedLocalClass(jSourceFile, clsFile.file) else cb.generatedNonLocalClass(jSourceFile, clsFile.file, className, fullClassName) - } - } /** Convert a `dotty.tools.io.AbstractFile` into a * `dotty.tools.dotc.interfaces.AbstractFile`. 
*/ private def convertAbstractFile(absfile: dotty.tools.io.AbstractFile): interfaces.AbstractFile = - new interfaces.AbstractFile { + new interfaces.AbstractFile: override def name = absfile.name override def path = absfile.path override def jfile = Optional.ofNullable(absfile.file) - } - private def genClass(cd: TypeDef, unit: CompilationUnit): ClassNode = { + private def genClass(cd: TypeDef, unit: CompilationUnit): ClassNode = val b = new Impl.PlainClassBuilder(unit) b.genPlainClass(cd) val cls = b.cnode checkForCaseConflict(cls.name, cd.symbol) cls - } - private def genMirrorClass(classSym: Symbol, unit: CompilationUnit): ClassNode = { + private def genMirrorClass(classSym: Symbol, unit: CompilationUnit): ClassNode = val cls = mirrorCodeGen.genMirrorClass(classSym, unit) checkForCaseConflict(cls.name, classSym) cls - } private val lowerCaseNames = mutable.HashMap.empty[String, Symbol] - private def checkForCaseConflict(javaClassName: String, classSymbol: Symbol) = { + private def checkForCaseConflict(javaClassName: String, classSymbol: Symbol) = val lowerCaseName = javaClassName.toLowerCase - lowerCaseNames.get(lowerCaseName) match { + lowerCaseNames.get(lowerCaseName) match case None => lowerCaseNames.put(lowerCaseName, classSymbol) case Some(dupClassSym) => @@ -159,23 +150,18 @@ class CodeGen(val int: DottyBackendInterface, val primitives: DottyPrimitives)( if (classSymbol.effectiveName.toString < dupClassSym.effectiveName.toString) (classSymbol, dupClassSym) else (dupClassSym, classSymbol) val same = classSymbol.effectiveName.toString == dupClassSym.effectiveName.toString - atPhase(typerPhase) { + atPhase(typerPhase): if same then // FIXME: This should really be an error, but then FromTasty tests fail report.warning(s"${cl1.show} and ${cl2.showLocated} produce classes that overwrite one another", cl1.sourcePos) else report.warning(s"${cl1.show} differs only in case from ${cl2.showLocated}. 
" + "Such classes will overwrite one another on case-insensitive filesystems.", cl1.sourcePos) - } - } - } - sealed transparent trait ImplEarlyInit{ + sealed transparent trait ImplEarlyInit: val int: self.int.type = self.int val bTypes: self.bTypes.type = self.bTypes protected val primitives: DottyPrimitives = self.primitives - } - object Impl extends ImplEarlyInit with BCodeSyncAndTry { + object Impl extends ImplEarlyInit with BCodeSyncAndTry: class PlainClassBuilder(unit: CompilationUnit) extends SyncAndTryBuilder(unit) - } } diff --git a/compiler/src/dotty/tools/backend/jvm/CollectSuperCalls.scala b/compiler/src/dotty/tools/backend/jvm/CollectSuperCalls.scala index 299c1c75d6cf..c4c911cc35b8 100644 --- a/compiler/src/dotty/tools/backend/jvm/CollectSuperCalls.scala +++ b/compiler/src/dotty/tools/backend/jvm/CollectSuperCalls.scala @@ -17,31 +17,26 @@ import dotty.tools.dotc.transform.MegaPhase.MiniPhase * methods in a redundant mixin class could be implemented with a default abstract method, * the redundant mixin class could be required as a parent by the JVM. 
*/ -class CollectSuperCalls extends MiniPhase { +class CollectSuperCalls extends MiniPhase: import tpd._ override def phaseName: String = CollectSuperCalls.name override def description: String = CollectSuperCalls.description - override def transformSelect(tree: Select)(using Context): Tree = { - tree.qualifier match { + override def transformSelect(tree: Select)(using Context): Tree = + tree.qualifier match case sup: Super => if (tree.symbol.owner.is(Trait)) registerSuperCall(ctx.owner.enclosingClass.asClass, tree.symbol.owner.asClass) case _ => - } tree - } - private def registerSuperCall(sym: ClassSymbol, calls: ClassSymbol)(using Context) = { - genBCodePhase match { + private def registerSuperCall(sym: ClassSymbol, calls: ClassSymbol)(using Context) = + genBCodePhase match case genBCodePhase: GenBCode => genBCodePhase.registerSuperCall(sym, calls) case _ => - } - } -} object CollectSuperCalls: val name: String = "collectSuperCalls" diff --git a/compiler/src/dotty/tools/backend/jvm/CoreBTypes.scala b/compiler/src/dotty/tools/backend/jvm/CoreBTypes.scala index 30ad6b29b9f0..d42f497fff41 100644 --- a/compiler/src/dotty/tools/backend/jvm/CoreBTypes.scala +++ b/compiler/src/dotty/tools/backend/jvm/CoreBTypes.scala @@ -9,50 +9,49 @@ import scala.tools.asm.{Handle, Opcodes} import dotty.tools.dotc.core.StdNames import BTypes.InternalName -abstract class CoreBTypes { +abstract class CoreBTypes: val bTypes: BTypes import bTypes._ - def primitiveTypeMap: Map[Symbol, PrimitiveBType] + def primitiveTypeMap: Map[Symbol, PrimitiveBType] - def boxedClasses: Set[ClassBType] + def boxedClasses: Set[ClassBType] - def boxedClassOfPrimitive: Map[PrimitiveBType, ClassBType] + def boxedClassOfPrimitive: Map[PrimitiveBType, ClassBType] - def boxResultType: Map[Symbol, ClassBType] + def boxResultType: Map[Symbol, ClassBType] - def unboxResultType: Map[Symbol, PrimitiveBType] + def unboxResultType: Map[Symbol, PrimitiveBType] - def srNothingRef : ClassBType - def srNullRef : ClassBType 
+ def srNothingRef : ClassBType + def srNullRef : ClassBType - def ObjectRef : ClassBType - def StringRef : ClassBType - def jlStringBuilderRef : ClassBType - def jlStringBufferRef : ClassBType - def jlCharSequenceRef : ClassBType - def jlClassRef : ClassBType - def jlThrowableRef : ClassBType - def jlCloneableRef : ClassBType - def jiSerializableRef : ClassBType - def jlClassCastExceptionRef : ClassBType - def jlIllegalArgExceptionRef : ClassBType - def jliSerializedLambdaRef : ClassBType + def ObjectRef : ClassBType + def StringRef : ClassBType + def jlStringBuilderRef : ClassBType + def jlStringBufferRef : ClassBType + def jlCharSequenceRef : ClassBType + def jlClassRef : ClassBType + def jlThrowableRef : ClassBType + def jlCloneableRef : ClassBType + def jiSerializableRef : ClassBType + def jlClassCastExceptionRef : ClassBType + def jlIllegalArgExceptionRef : ClassBType + def jliSerializedLambdaRef : ClassBType - def srBoxesRuntimeRef: ClassBType + def srBoxesRuntimeRef: ClassBType - def jliLambdaMetaFactoryMetafactoryHandle : Handle - def jliLambdaMetaFactoryAltMetafactoryHandle : Handle - def jliLambdaDeserializeBootstrapHandle : Handle - def jliStringConcatFactoryMakeConcatWithConstantsHandle: Handle + def jliLambdaMetaFactoryMetafactoryHandle : Handle + def jliLambdaMetaFactoryAltMetafactoryHandle : Handle + def jliLambdaDeserializeBootstrapHandle : Handle + def jliStringConcatFactoryMakeConcatWithConstantsHandle: Handle - def asmBoxTo : Map[BType, MethodNameAndType] - def asmUnboxTo: Map[BType, MethodNameAndType] + def asmBoxTo : Map[BType, MethodNameAndType] + def asmUnboxTo: Map[BType, MethodNameAndType] - def typeOfArrayOp: Map[Int, BType] -} + def typeOfArrayOp: Map[Int, BType] -abstract class CoreBTypesFromSymbols[I <: DottyBackendInterface] extends CoreBTypes { +abstract class CoreBTypesFromSymbols[I <: DottyBackendInterface] extends CoreBTypes: val bTypes: BTypesFromSymbols[I] import bTypes._ @@ -98,23 +97,21 @@ abstract class 
CoreBTypesFromSymbols[I <: DottyBackendInterface] extends CoreBTy * Maps the method symbol for a box method to the boxed type of the result. For example, the * method symbol for `Byte.box()` is mapped to the ClassBType `java/lang/Byte`. */ - lazy val boxResultType: Map[Symbol, ClassBType] = { + lazy val boxResultType: Map[Symbol, ClassBType] = val boxMethods = defn.ScalaValueClasses().map{x => // @darkdimius Are you sure this should be a def? (x, Erasure.Boxing.boxMethod(x.asClass)) }.toMap for ((valueClassSym, boxMethodSym) <- boxMethods) yield boxMethodSym -> boxedClassOfPrimitive(primitiveTypeMap(valueClassSym)) - } /** * Maps the method symbol for an unbox method to the primitive type of the result. * For example, the method symbol for `Byte.unbox()`) is mapped to the PrimitiveBType BYTE. */ - lazy val unboxResultType: Map[Symbol, PrimitiveBType] = { + lazy val unboxResultType: Map[Symbol, PrimitiveBType] = val unboxMethods: Map[Symbol, Symbol] = defn.ScalaValueClasses().map(x => (x, Erasure.Boxing.unboxMethod(x.asClass))).toMap for ((valueClassSym, unboxMethodSym) <- unboxMethods) yield unboxMethodSym -> primitiveTypeMap(valueClassSym) - } /* * srNothingRef and srNullRef exist at run-time only. 
They are the bytecode-level manifestation (in @@ -217,7 +214,7 @@ abstract class CoreBTypesFromSymbols[I <: DottyBackendInterface] extends CoreBTy DOUBLE -> MethodNameAndType("unboxToDouble", MethodBType(List(ObjectRef), DOUBLE)) ) - lazy val typeOfArrayOp: Map[Int, BType] = { + lazy val typeOfArrayOp: Map[Int, BType] = import dotty.tools.backend.ScalaPrimitivesOps._ Map( (List(ZARRAY_LENGTH, ZARRAY_GET, ZARRAY_SET) map (_ -> BOOL)) ++ @@ -230,5 +227,3 @@ abstract class CoreBTypesFromSymbols[I <: DottyBackendInterface] extends CoreBTy (List(DARRAY_LENGTH, DARRAY_GET, DARRAY_SET) map (_ -> DOUBLE)) ++ (List(OARRAY_LENGTH, OARRAY_GET, OARRAY_SET) map (_ -> ObjectRef)) : _* ) - } -} diff --git a/compiler/src/dotty/tools/backend/jvm/DottyBackendInterface.scala b/compiler/src/dotty/tools/backend/jvm/DottyBackendInterface.scala index b2278c3f0ce8..ef68c7a31fd2 100644 --- a/compiler/src/dotty/tools/backend/jvm/DottyBackendInterface.scala +++ b/compiler/src/dotty/tools/backend/jvm/DottyBackendInterface.scala @@ -25,24 +25,21 @@ import StdNames.nme import NameKinds.{LazyBitMapName, LazyLocalName} import Names.Name -class DottyBackendInterface(val superCallsMap: ReadOnlyMap[Symbol, Set[ClassSymbol]])(using val ctx: Context) { +class DottyBackendInterface(val superCallsMap: ReadOnlyMap[Symbol, Set[ClassSymbol]])(using val ctx: Context): private val desugared = new java.util.IdentityHashMap[Type, tpd.Select] - def cachedDesugarIdent(i: Ident): Option[tpd.Select] = { + def cachedDesugarIdent(i: Ident): Option[tpd.Select] = var found = desugared.get(i.tpe) - if (found == null) { - tpd.desugarIdent(i) match { + if (found == null) + tpd.desugarIdent(i) match case sel: tpd.Select => desugared.put(i.tpe, sel) found = sel case _ => - } - } if (found == null) None else Some(found) - } - object DesugaredSelect extends DeconstructorCommon[tpd.Tree] { + object DesugaredSelect extends DeconstructorCommon[tpd.Tree]: var desugared: tpd.Select = null @@ -53,50 +50,40 @@ class 
DottyBackendInterface(val superCallsMap: ReadOnlyMap[Symbol, Set[ClassSymb def _2: Name = desugared.name - override def unapply(s: tpd.Tree): this.type = { - s match { + override def unapply(s: tpd.Tree): this.type = + s match case t: tpd.Select => desugared = t case t: Ident => - cachedDesugarIdent(t) match { + cachedDesugarIdent(t) match case Some(t) => desugared = t case None => desugared = null - } case _ => desugared = null - } this - } - } - object ArrayValue extends DeconstructorCommon[tpd.JavaSeqLiteral] { - def _1: Type = field.tpe match { + object ArrayValue extends DeconstructorCommon[tpd.JavaSeqLiteral]: + def _1: Type = field.tpe match case JavaArrayType(elem) => elem case _ => report.error(em"JavaSeqArray with type ${field.tpe} reached backend: $field", ctx.source.atSpan(field.span)) UnspecifiedErrorType - } def _2: List[Tree] = field.elems - } - abstract class DeconstructorCommon[T >: Null <: AnyRef] { + abstract class DeconstructorCommon[T >: Null <: AnyRef]: var field: T = null def get: this.type = this def isEmpty: Boolean = field eq null def isDefined = !isEmpty - def unapply(s: T): this.type ={ + def unapply(s: T): this.type = field = s this - } - } -} -object DottyBackendInterface { +object DottyBackendInterface: - private def erasureString(clazz: Class[_]): String = { + private def erasureString(clazz: Class[_]): String = if (clazz.isArray) "Array[" + erasureString(clazz.getComponentType) + "]" else clazz.getName - } def requiredClass(str: String)(using Context): ClassSymbol = Symbols.requiredClass(str) @@ -107,11 +94,10 @@ object DottyBackendInterface { def requiredModule(str: String)(using Context): Symbol = Symbols.requiredModule(str) - def requiredModule[T](using evidence: ClassTag[T], ctx: Context): Symbol = { + def requiredModule[T](using evidence: ClassTag[T], ctx: Context): Symbol = val moduleName = erasureString(evidence.runtimeClass) val className = if (moduleName.endsWith("$")) moduleName.dropRight(1) else moduleName 
requiredModule(className) - } given symExtensions: AnyRef with extension (sym: Symbol) @@ -141,28 +127,25 @@ object DottyBackendInterface { * for such objects will get a MODULE$ flag and a corresponding static initializer. */ def isStaticModuleClass(using Context): Boolean = - (sym.is(Module)) && { + (sym.is(Module)) `&&`: // scalac uses atPickling here // this would not work if modules are created after pickling // for example by specialization val original = toDenot(sym).initial val validity = original.validFor - atPhase(validity.phaseId) { + atPhase(validity.phaseId): toDenot(sym).isStatic - } - } def originalLexicallyEnclosingClass(using Context): Symbol = // used to populate the EnclosingMethod attribute. // it is very tricky in presence of classes(and annonymous classes) defined inside supper calls. - if (sym.exists) { + if (sym.exists) val validity = toDenot(sym).initial.validFor - atPhase(validity.phaseId) { + atPhase(validity.phaseId): toDenot(sym).lexicallyEnclosingClass - } - } else NoSymbol + else NoSymbol /** * True for module classes of package level objects. The backend will generate a mirror class for @@ -170,9 +153,8 @@ object DottyBackendInterface { */ def isTopLevelModuleClass(using Context): Boolean = sym.is(ModuleClass) && - atPhase(flattenPhase) { + atPhase(flattenPhase): toDenot(sym).owner.is(PackageClass) - } def javaSimpleName(using Context): String = toDenot(sym).name.mangledString def javaClassName(using Context): String = toDenot(sym).fullName.mangledString @@ -198,8 +180,6 @@ object DottyBackendInterface { * True if the current compilation unit is of a primitive class (scala.Boolean et al). * Used only in assertions. 
*/ - def isCompilingPrimitive(using Context) = { + def isCompilingPrimitive(using Context) = primitiveCompilationUnits(ctx.compilationUnit.source.file.name) - } -} diff --git a/compiler/src/dotty/tools/backend/jvm/GenBCode.scala b/compiler/src/dotty/tools/backend/jvm/GenBCode.scala index 469a6ea57679..a7becb450880 100644 --- a/compiler/src/dotty/tools/backend/jvm/GenBCode.scala +++ b/compiler/src/dotty/tools/backend/jvm/GenBCode.scala @@ -17,16 +17,15 @@ class GenBCode extends Phase { self => override def description: String = GenBCode.description private val superCallsMap = new MutableSymbolMap[Set[ClassSymbol]] - def registerSuperCall(sym: Symbol, calls: ClassSymbol): Unit = { + def registerSuperCall(sym: Symbol, calls: ClassSymbol): Unit = val old = superCallsMap.getOrElse(sym, Set.empty) superCallsMap.update(sym, old + calls) - } private val entryPoints = new mutable.HashSet[String]() def registerEntryPoint(s: String): Unit = entryPoints += s private var _backendInterface: DottyBackendInterface = _ - def backendInterface(using ctx: Context): DottyBackendInterface = { + def backendInterface(using ctx: Context): DottyBackendInterface = if _backendInterface eq null then // Enforce usage of FreshContext so we would be able to modify compilation unit between runs val backendCtx = ctx match @@ -34,37 +33,32 @@ class GenBCode extends Phase { self => case ctx => ctx.fresh _backendInterface = DottyBackendInterface(superCallsMap)(using backendCtx) _backendInterface - } private var _codeGen: CodeGen = _ - def codeGen(using Context): CodeGen = { + def codeGen(using Context): CodeGen = if _codeGen eq null then val int = backendInterface val dottyPrimitives = new DottyPrimitives(ctx) _codeGen = new CodeGen(int, dottyPrimitives)(bTypes.asInstanceOf[BTypesFromSymbols[int.type]]) _codeGen - } private var _bTypes: BTypesFromSymbols[DottyBackendInterface] = _ - def bTypes(using Context): BTypesFromSymbols[DottyBackendInterface] = { + def bTypes(using Context): 
BTypesFromSymbols[DottyBackendInterface] = if _bTypes eq null then _bTypes = BTypesFromSymbols(backendInterface, frontendAccess) _bTypes - } private var _frontendAccess: PostProcessorFrontendAccess | Null = _ - def frontendAccess(using Context): PostProcessorFrontendAccess = { + def frontendAccess(using Context): PostProcessorFrontendAccess = if _frontendAccess eq null then _frontendAccess = PostProcessorFrontendAccess.Impl(backendInterface, entryPoints) _frontendAccess.nn - } private var _postProcessor: PostProcessor | Null = _ - def postProcessor(using Context): PostProcessor = { + def postProcessor(using Context): PostProcessor = if _postProcessor eq null then _postProcessor = new PostProcessor(frontendAccess, bTypes) _postProcessor.nn - } override def run(using ctx: Context): Unit = // CompilationUnit is the only component that will differ between each run invocation @@ -78,17 +72,16 @@ class GenBCode extends Phase { self => // In Scala 3, we don't perform backend optimizations and always perform post-processing immediately. // https://github.com/scala/scala/pull/6057 postProcessor.postProcessAndSendToDisk(generated) - (ctx.compilerCallback: CompilerCallback | Null) match { + (ctx.compilerCallback: CompilerCallback | Null) match case cb: CompilerCallback => cb.onSourceCompiled(ctx.source) case null => () - } - override def runOn(units: List[CompilationUnit])(using ctx:Context): List[CompilationUnit] = { + override def runOn(units: List[CompilationUnit])(using ctx:Context): List[CompilationUnit] = try super.runOn(units) finally // frontendAccess and postProcessor are created lazilly, clean them up only if they were initialized if _frontendAccess ne null then - frontendAccess.compilerSettings.outputDirectory match { + frontendAccess.compilerSettings.outputDirectory match case jar: JarArchive => if (ctx.run.nn.suspendedUnits.nonEmpty) // If we close the jar the next run will not be able to write on the jar. 
@@ -97,13 +90,10 @@ class GenBCode extends Phase { self => jar.close() case _ => () - } if _postProcessor ne null then postProcessor.classfileWriter.close() - } } -object GenBCode { +object GenBCode: val name: String = "genBCode" val description: String = "generate JVM bytecode" -} diff --git a/compiler/src/dotty/tools/backend/jvm/GenBCodeOps.scala b/compiler/src/dotty/tools/backend/jvm/GenBCodeOps.scala index 210e47566cb9..ac72421279ba 100644 --- a/compiler/src/dotty/tools/backend/jvm/GenBCodeOps.scala +++ b/compiler/src/dotty/tools/backend/jvm/GenBCodeOps.scala @@ -6,11 +6,10 @@ import scala.tools.asm object GenBCodeOps extends GenBCodeOps -class GenBCodeOps { +class GenBCodeOps: extension (flags: Int) def addFlagIf(cond: Boolean, flag: Int): Int = if cond then flags | flag else flags final val PublicStatic = asm.Opcodes.ACC_PUBLIC | asm.Opcodes.ACC_STATIC final val PublicStaticFinal = asm.Opcodes.ACC_PUBLIC | asm.Opcodes.ACC_STATIC | asm.Opcodes.ACC_FINAL final val PrivateStaticFinal = asm.Opcodes.ACC_PRIVATE | asm.Opcodes.ACC_STATIC | asm.Opcodes.ACC_FINAL -} diff --git a/compiler/src/dotty/tools/backend/jvm/GenericSignatureVisitor.scala b/compiler/src/dotty/tools/backend/jvm/GenericSignatureVisitor.scala index c16bc70fc3b0..29871dc291c7 100644 --- a/compiler/src/dotty/tools/backend/jvm/GenericSignatureVisitor.scala +++ b/compiler/src/dotty/tools/backend/jvm/GenericSignatureVisitor.scala @@ -12,7 +12,7 @@ import scala.jdk.CollectionConverters._ // Backported from scala/scala, commit sha: 724be0e9425b9ad07c244d25efdad695d75abbcf // https://github.com/scala/scala/blob/724be0e9425b9ad07c244d25efdad695d75abbcf/src/compiler/scala/tools/nsc/backend/jvm/analysis/BackendUtils.scala#L928 -abstract class GenericSignatureVisitor(nestedOnly: Boolean) { +abstract class GenericSignatureVisitor(nestedOnly: Boolean): // For performance (`Char => Boolean` is not specialized) private trait CharBooleanFunction { def apply(c: Char): Boolean } @@ -21,22 +21,19 @@ abstract 
class GenericSignatureVisitor(nestedOnly: Boolean) { def raiseError(msg: String, sig: String, e: Option[Throwable] = None): Unit - def visitClassSignature(sig: String): Unit = if (sig != null) { + def visitClassSignature(sig: String): Unit = if (sig != null) val p = new Parser(sig, nestedOnly) p.safely { p.classSignature() } - } - def visitMethodSignature(sig: String): Unit = if (sig != null) { + def visitMethodSignature(sig: String): Unit = if (sig != null) val p = new Parser(sig, nestedOnly) p.safely { p.methodSignature() } - } - def visitFieldSignature(sig: String): Unit = if (sig != null) { + def visitFieldSignature(sig: String): Unit = if (sig != null) val p = new Parser(sig, nestedOnly) p.safely { p.fieldSignature() } - } - private final class Parser(sig: String, nestedOnly: Boolean) { + private final class Parser(sig: String, nestedOnly: Boolean): private var index = 0 private val end = sig.length @@ -44,92 +41,77 @@ abstract class GenericSignatureVisitor(nestedOnly: Boolean) { private val Aborted: Throwable = new NoStackTrace { } private def abort(): Nothing = throw Aborted - @inline def safely(f: => Unit): Unit = try f catch { + @inline def safely(f: => Unit): Unit = try f catch case Aborted => case NonFatal(e) => raiseError(s"Exception thrown during signature parsing", sig, Some(e)) - } - private def current = { - if (index >= end) { + private def current = + if (index >= end) raiseError(s"Out of bounds, $index >= $end", sig) abort() // Don't continue, even if `notifyInvalidSignature` returns - } sig.charAt(index) - } - private def accept(c: Char): Unit = { - if (current != c) { + private def accept(c: Char): Unit = + if (current != c) raiseError(s"Expected $c at $index, found $current", sig) abort() - } index += 1 - } private def skip(): Unit = { index += 1 } private def getCurrentAndSkip(): Char = { val c = current; skip(); c } - private def skipUntil(isDelimiter: CharBooleanFunction): Unit = { + private def skipUntil(isDelimiter: CharBooleanFunction): 
Unit = while (!isDelimiter(current)) { index += 1 } - } - private def skipUntilDelimiter(delimiter: Char): Unit = { - sig.indexOf(delimiter, index) match { + private def skipUntilDelimiter(delimiter: Char): Unit = + sig.indexOf(delimiter, index) match case -1 => raiseError(s"Out of bounds", sig) abort() // Don't continue, even if `notifyInvalidSignature` returns case i => index = i - } - } - private def appendUntil(builder: java.lang.StringBuilder, isDelimiter: CharBooleanFunction): Unit = { + private def appendUntil(builder: java.lang.StringBuilder, isDelimiter: CharBooleanFunction): Unit = val start = index skipUntil(isDelimiter) builder.append(sig, start, index) - } - def isBaseType(c: Char): Boolean = c match { + def isBaseType(c: Char): Boolean = c match case 'B' | 'C' | 'D' | 'F' | 'I' | 'J' | 'S' | 'Z' => true case _ => false - } private val isClassNameEnd: CharBooleanFunction = (c: Char) => c == '<' || c == '.' || c == ';' - private def typeArguments(): Unit = if (current == '<') { + private def typeArguments(): Unit = if (current == '<') skip() - while (current != '>') current match { + while (current != '>') current match case '*' | '+' | '-' => skip() case _ => referenceTypeSignature() - } accept('>') - } - @tailrec private def referenceTypeSignature(): Unit = getCurrentAndSkip() match { + @tailrec private def referenceTypeSignature(): Unit = getCurrentAndSkip() match case 'L' => var names: java.lang.StringBuilder = null val start = index var seenDollar = false - while (!isClassNameEnd(current)) { + while (!isClassNameEnd(current)) seenDollar ||= current == '$' index += 1 - } - if ((current == '.' || seenDollar) || !nestedOnly) { + if ((current == '.' 
|| seenDollar) || !nestedOnly) // OPT: avoid allocations when only a top-level class is encountered names = new java.lang.StringBuilder(32) names.append(sig, start, index) visitInternalName(names.toString) - } typeArguments() - while (current == '.') { + while (current == '.') skip() names.append('$') appendUntil(names, isClassNameEnd) visitInternalName(names.toString) typeArguments() - } accept(';') case 'T' => @@ -139,54 +121,44 @@ abstract class GenericSignatureVisitor(nestedOnly: Boolean) { case '[' => if (isBaseType(current)) skip() else referenceTypeSignature() - } - private def typeParameters(): Unit = if (current == '<') { + private def typeParameters(): Unit = if (current == '<') skip() - while (current != '>') { + while (current != '>') skipUntilDelimiter(':'); skip() val c = current // The ClassBound can be missing, but only if there's an InterfaceBound after. // This is an assumption that's not in the spec, see https://stackoverflow.com/q/44284928 if (c != ':' && c != '>') { referenceTypeSignature() } while (current == ':') { skip(); referenceTypeSignature() } - } accept('>') - } - def classSignature(): Unit = { + def classSignature(): Unit = typeParameters() while (index < end) referenceTypeSignature() - } - def methodSignature(): Unit = { + def methodSignature(): Unit = typeParameters() accept('(') - while (current != ')') { + while (current != ')') if (isBaseType(current)) skip() else referenceTypeSignature() - } accept(')') if (current == 'V' || isBaseType(current)) skip() else referenceTypeSignature() - while (index < end) { + while (index < end) accept('^') referenceTypeSignature() - } - } - def fieldSignature(): Unit = if (sig != null) safely { + def fieldSignature(): Unit = if (sig != null) safely: referenceTypeSignature() - } - } -} // Backported from scala/scala, commit sha: 724be0e9425b9ad07c244d25efdad695d75abbcf // 
https://github.com/scala/scala/blob/724be0e9425b9ad07c244d25efdad695d75abbcf/src/compiler/scala/tools/nsc/backend/jvm/analysis/BackendUtils.scala#L790 -abstract class NestedClassesCollector[T](nestedOnly: Boolean) extends GenericSignatureVisitor(nestedOnly) { +abstract class NestedClassesCollector[T](nestedOnly: Boolean) extends GenericSignatureVisitor(nestedOnly): type InternalName = String def declaredNestedClasses(internalName: InternalName): List[T] @@ -196,12 +168,11 @@ abstract class NestedClassesCollector[T](nestedOnly: Boolean) extends GenericSig val referredInnerClasses = mutable.Set.empty[T] def innerClasses: collection.Set[T] = declaredInnerClasses ++ referredInnerClasses - def clear(): Unit = { + def clear(): Unit = declaredInnerClasses.clear() referredInnerClasses.clear() - } - def visit(classNode: ClassNode): Unit = { + def visit(classNode: ClassNode): Unit = visitInternalName(classNode.name) declaredInnerClasses ++= declaredNestedClasses(classNode.name) @@ -216,16 +187,15 @@ abstract class NestedClassesCollector[T](nestedOnly: Boolean) extends GenericSig visitClassSignature(classNode.signature) - for (f <- classNode.fields.asScala) { + for (f <- classNode.fields.asScala) visitDescriptor(f.desc) visitAnnotations(f.visibleAnnotations) visitAnnotations(f.visibleTypeAnnotations) visitAnnotations(f.invisibleAnnotations) visitAnnotations(f.invisibleTypeAnnotations) visitFieldSignature(f.signature) - } - for (m <- classNode.methods.asScala) { + for (m <- classNode.methods.asScala) visitDescriptor(m.desc) visitAnnotations(m.visibleAnnotations) @@ -241,7 +211,7 @@ abstract class NestedClassesCollector[T](nestedOnly: Boolean) extends GenericSig for (tcb <- m.tryCatchBlocks.asScala) visitInternalName(tcb.`type`) val iter = m.instructions.iterator - while (iter.hasNext) iter.next() match { + while (iter.hasNext) iter.next() match case ti: TypeInsnNode => visitInternalNameOrArrayReference(ti.desc) case fi: FieldInsnNode => 
visitInternalNameOrArrayReference(fi.owner); visitDescriptor(fi.desc) case mi: MethodInsnNode => visitInternalNameOrArrayReference(mi.owner); visitDescriptor(mi.desc) @@ -249,48 +219,40 @@ abstract class NestedClassesCollector[T](nestedOnly: Boolean) extends GenericSig case ci: LdcInsnNode => visitConstant(ci.cst) case ma: MultiANewArrayInsnNode => visitDescriptor(ma.desc) case _ => - } visitMethodSignature(m.signature) - } - } - private def containsChar(s: String, offset: Int, length: Int, char: Char): Boolean = { + private def containsChar(s: String, offset: Int, length: Int, char: Char): Boolean = val ix = s.indexOf(char, offset) !(ix == -1 || ix >= offset + length) - } - def visitInternalName(internalName: String, offset: Int, length: Int): Unit = if (internalName != null && containsChar(internalName, offset, length, '$')) { + def visitInternalName(internalName: String, offset: Int, length: Int): Unit = if (internalName != null && containsChar(internalName, offset, length, '$')) for (c <- getClassIfNested(internalName.substring(offset, length))) if (!declaredInnerClasses.contains(c)) referredInnerClasses += c - } // either an internal/Name or [[Linternal/Name; -- there are certain references in classfiles // that are either an internal name (without the surrounding `L;`) or an array descriptor // `[Linternal/Name;`. 
- def visitInternalNameOrArrayReference(ref: String): Unit = if (ref != null) { + def visitInternalNameOrArrayReference(ref: String): Unit = if (ref != null) val bracket = ref.lastIndexOf('[') if (bracket == -1) visitInternalName(ref) else if (ref.charAt(bracket + 1) == 'L') visitInternalName(ref, bracket + 2, ref.length - 1) - } // we are only interested in the class references in the descriptor, so we can skip over // primitives and the brackets of array descriptors - def visitDescriptor(desc: String): Unit = (desc.charAt(0): @switch) match { + def visitDescriptor(desc: String): Unit = (desc.charAt(0): @switch) match case '(' => var i = 1 - while (i < desc.length) { - if (desc.charAt(i) == 'L') { + while (i < desc.length) + if (desc.charAt(i) == 'L') val start = i + 1 // skip the L var seenDollar = false while ({val ch = desc.charAt(i); seenDollar ||= (ch == '$'); ch != ';'}) i += 1 if (seenDollar) visitInternalName(desc, start, i) - } // skips over '[', ')', primitives i += 1 - } case 'L' => visitInternalName(desc, 1, desc.length - 1) @@ -299,28 +261,23 @@ abstract class NestedClassesCollector[T](nestedOnly: Boolean) extends GenericSig visitInternalNameOrArrayReference(desc) case _ => // skip over primitive types - } - def visitConstant(const: AnyRef): Unit = const match { + def visitConstant(const: AnyRef): Unit = const match case t: Type => visitDescriptor(t.getDescriptor) case _ => - } // in principle we could references to annotation types, as they only end up as strings in the // constant pool, not as class references. however, the java compiler still includes nested // annotation classes in the innerClass table, so we do the same. explained in detail in the // large comment in class BTypes. 
- def visitAnnotation(annot: AnnotationNode): Unit = { + def visitAnnotation(annot: AnnotationNode): Unit = visitDescriptor(annot.desc) if (annot.values != null) annot.values.asScala foreach visitConstant - } def visitAnnotations(annots: java.util.List[_ <: AnnotationNode]) = if (annots != null) annots.asScala foreach visitAnnotation def visitAnnotationss(annotss: Array[java.util.List[AnnotationNode]]) = if (annotss != null) annotss foreach visitAnnotations - def visitHandle(handle: Handle): Unit = { + def visitHandle(handle: Handle): Unit = visitInternalNameOrArrayReference(handle.getOwner) visitDescriptor(handle.getDesc) - } -} diff --git a/compiler/src/dotty/tools/backend/jvm/PostProcessor.scala b/compiler/src/dotty/tools/backend/jvm/PostProcessor.scala index 606b5645aa24..88524ab01ac7 100644 --- a/compiler/src/dotty/tools/backend/jvm/PostProcessor.scala +++ b/compiler/src/dotty/tools/backend/jvm/PostProcessor.scala @@ -12,7 +12,7 @@ import scala.tools.asm.tree.ClassNode * Implements late stages of the backend that don't depend on a Global instance, i.e., * optimizations, post-processing and classfile serialization and writing. 
*/ -class PostProcessor(val frontendAccess: PostProcessorFrontendAccess, val bTypes: BTypes) { +class PostProcessor(val frontendAccess: PostProcessorFrontendAccess, val bTypes: BTypes): self => import bTypes.* import frontendAccess.{backendReporting, compilerSettings} @@ -21,9 +21,9 @@ class PostProcessor(val frontendAccess: PostProcessorFrontendAccess, val bTypes: val backendUtils = new BackendUtils(this) val classfileWriter = ClassfileWriter(frontendAccess) - def postProcessAndSendToDisk(generatedDefs: GeneratedDefs): Unit = { + def postProcessAndSendToDisk(generatedDefs: GeneratedDefs): Unit = val GeneratedDefs(classes, tasty) = generatedDefs - for (GeneratedClass(classNode, sourceFile, isArtifact, onFileCreated) <- classes) { + for (GeneratedClass(classNode, sourceFile, isArtifact, onFileCreated) <- classes) val bytes = try if !isArtifact then setSerializableLambdas(classNode) @@ -38,39 +38,32 @@ class PostProcessor(val frontendAccess: PostProcessorFrontendAccess, val bTypes: backendReporting.error(em"Error while emitting ${classNode.name}\n${ex.getMessage}") null - if (bytes != null) { + if (bytes != null) if (AsmUtils.traceSerializedClassEnabled && classNode.name.nn.contains(AsmUtils.traceSerializedClassPattern)) AsmUtils.traceClass(bytes) val clsFile = classfileWriter.writeClass(classNode.name.nn, bytes, sourceFile) if clsFile != null then onFileCreated(clsFile) - } - } - for (GeneratedTasty(classNode, binaryGen) <- tasty){ + for (GeneratedTasty(classNode, binaryGen) <- tasty) classfileWriter.writeTasty(classNode.name.nn, binaryGen()) - } - } - private def setSerializableLambdas(classNode: ClassNode): Unit = { + private def setSerializableLambdas(classNode: ClassNode): Unit = import backendUtils.{collectSerializableLambdas, addLambdaDeserialize} val serializableLambdas = collectSerializableLambdas(classNode) if serializableLambdas.nonEmpty then addLambdaDeserialize(classNode, serializableLambdas) - } - private def setInnerClasses(classNode: ClassNode): Unit 
= { + private def setInnerClasses(classNode: ClassNode): Unit = import backendUtils.{collectNestedClasses, addInnerClasses} classNode.innerClasses.nn.clear() val (declared, referred) = collectNestedClasses(classNode) addInnerClasses(classNode, declared, referred) - } - def serializeClass(classNode: ClassNode): Array[Byte] = { + def serializeClass(classNode: ClassNode): Array[Byte] = val cw = new ClassWriterWithBTypeLub(backendUtils.extraProc) classNode.accept(cw) cw.toByteArray.nn - } // ----------------------------------------------------------------------------------------- // finding the least upper bound in agreement with the bytecode verifier (given two internal names handed by ASM) @@ -84,14 +77,14 @@ class PostProcessor(val frontendAccess: PostProcessorFrontendAccess, val bTypes: * The internal name of the least common ancestor of the types given by inameA and inameB. * It's what ASM needs to know in order to compute stack map frames, http://asm.ow2.org/doc/developer-guide.html#controlflow */ - final class ClassWriterWithBTypeLub(flags: Int) extends ClassWriter(flags) { + final class ClassWriterWithBTypeLub(flags: Int) extends ClassWriter(flags): /** * This method is used by asm when computing stack map frames. It is thread-safe: it depends * only on the BTypes component, which does not depend on global. * TODO @lry move to a different place where no global is in scope, on bTypes. */ - override def getCommonSuperClass(inameA: String, inameB: String): String = { + override def getCommonSuperClass(inameA: String, inameB: String): String = // All types that appear in a class node need to have their ClassBType cached, see [[cachedClassBType]]. val a = classBTypeFromInternalName(inameA) val b = classBTypeFromInternalName(inameB) @@ -99,9 +92,6 @@ class PostProcessor(val frontendAccess: PostProcessorFrontendAccess, val bTypes: val lubName = lub.internalName assert(lubName != "scala/Any") lubName // ASM caches the answer during the lifetime of a ClassWriter. 
We outlive that. Not sure whether caching on our side would improve things. - } - } -} /** * The result of code generation. [[isArtifact]] is `true` for mirror. @@ -111,7 +101,6 @@ case class GeneratedTasty(classNode: ClassNode, tastyGen: () => Array[Byte]) case class GeneratedDefs(classes: List[GeneratedClass], tasty: List[GeneratedTasty]) // Temporary class, will be refactored in a future commit -trait ClassWriterForPostProcessor { +trait ClassWriterForPostProcessor: type InternalName = String def write(bytes: Array[Byte], className: InternalName, sourceFile: AbstractFile): Unit -} diff --git a/compiler/src/dotty/tools/backend/jvm/PostProcessorFrontendAccess.scala b/compiler/src/dotty/tools/backend/jvm/PostProcessorFrontendAccess.scala index 80ee68bc94c3..423595e30940 100644 --- a/compiler/src/dotty/tools/backend/jvm/PostProcessorFrontendAccess.scala +++ b/compiler/src/dotty/tools/backend/jvm/PostProcessorFrontendAccess.scala @@ -13,7 +13,7 @@ import dotty.tools.dotc.core.Phases * Functionality needed in the post-processor whose implementation depends on the compiler * frontend. All methods are synchronized. 
*/ -sealed abstract class PostProcessorFrontendAccess { +sealed abstract class PostProcessorFrontendAccess: import PostProcessorFrontendAccess._ def compilerSettings: CompilerSettings @@ -22,10 +22,9 @@ sealed abstract class PostProcessorFrontendAccess { private val frontendLock: AnyRef = new Object() inline final def frontendSynch[T](inline x: => T): T = frontendLock.synchronized(x) -} -object PostProcessorFrontendAccess { - sealed trait CompilerSettings { +object PostProcessorFrontendAccess: + sealed trait CompilerSettings: def debug: Boolean def target: String // javaOutputVersion @@ -33,19 +32,17 @@ object PostProcessorFrontendAccess { def outputDirectory: AbstractFile def mainClass: Option[String] - } - sealed trait BackendReporting { + sealed trait BackendReporting: def error(message: Context ?=> Message): Unit def warning(message: Context ?=> Message): Unit def log(message: Context ?=> String): Unit - } - class Impl[I <: DottyBackendInterface](val int: I, entryPoints: HashSet[String]) extends PostProcessorFrontendAccess { + class Impl[I <: DottyBackendInterface](val int: I, entryPoints: HashSet[String]) extends PostProcessorFrontendAccess: import int.given lazy val compilerSettings: CompilerSettings = buildCompilerSettings() - private def buildCompilerSettings(): CompilerSettings = new CompilerSettings { + private def buildCompilerSettings(): CompilerSettings = new CompilerSettings: extension [T](s: dotty.tools.dotc.config.Settings.Setting[T]) def valueSetByUser: Option[T] = Option(s.value).filter(_ != s.default) @@ -66,14 +63,10 @@ object PostProcessorFrontendAccess { lazy val dumpClassesDirectory: Option[String] = s.Ydumpclasses.valueSetByUser lazy val outputDirectory: AbstractFile = s.outputDir.value lazy val mainClass: Option[String] = s.XmainClass.valueSetByUser - } - object backendReporting extends BackendReporting { + object backendReporting extends BackendReporting: def error(message: Context ?=> Message): Unit = frontendSynch(report.error(message)) 
def warning(message: Context ?=> Message): Unit = frontendSynch(report.warning(message)) def log(message: Context ?=> String): Unit = frontendSynch(report.log(message)) - } def getEntryPoints: List[String] = frontendSynch(entryPoints.toList) - } -} \ No newline at end of file diff --git a/compiler/src/dotty/tools/backend/jvm/Primitives.scala b/compiler/src/dotty/tools/backend/jvm/Primitives.scala index c9ddfeab24e1..e660d313b946 100644 --- a/compiler/src/dotty/tools/backend/jvm/Primitives.scala +++ b/compiler/src/dotty/tools/backend/jvm/Primitives.scala @@ -4,13 +4,13 @@ package jvm import java.io.PrintWriter -object Primitives { +object Primitives: /** This class represents a primitive operation. */ class Primitive { } /** This class represents a test operation. */ - sealed abstract class TestOp { + sealed abstract class TestOp: /** Returns the negation of this operation. */ def negate(): TestOp @@ -24,61 +24,54 @@ object Primitives { /** used only from GenASM */ def opcodeIFICMP(): Int - } /** An equality test */ - case object EQ extends TestOp { + case object EQ extends TestOp: def negate() = NE override def toString() = "EQ" override def opcodeIF() = scala.tools.asm.Opcodes.IFEQ override def opcodeIFICMP() = scala.tools.asm.Opcodes.IF_ICMPEQ - } /** A non-equality test */ - case object NE extends TestOp { + case object NE extends TestOp: def negate() = EQ override def toString() = "NE" override def opcodeIF() = scala.tools.asm.Opcodes.IFNE override def opcodeIFICMP() = scala.tools.asm.Opcodes.IF_ICMPNE - } /** A less-than test */ - case object LT extends TestOp { + case object LT extends TestOp: def negate() = GE override def toString() = "LT" override def opcodeIF() = scala.tools.asm.Opcodes.IFLT override def opcodeIFICMP() = scala.tools.asm.Opcodes.IF_ICMPLT - } /** A greater-than-or-equal test */ - case object GE extends TestOp { + case object GE extends TestOp: def negate() = LT override def toString() = "GE" override def opcodeIF() = 
scala.tools.asm.Opcodes.IFGE override def opcodeIFICMP() = scala.tools.asm.Opcodes.IF_ICMPGE - } /** A less-than-or-equal test */ - case object LE extends TestOp { + case object LE extends TestOp: def negate() = GT override def toString() = "LE" override def opcodeIF() = scala.tools.asm.Opcodes.IFLE override def opcodeIFICMP() = scala.tools.asm.Opcodes.IF_ICMPLE - } /** A greater-than test */ - case object GT extends TestOp { + case object GT extends TestOp: def negate() = LE override def toString() = "GT" override def opcodeIF() = scala.tools.asm.Opcodes.IFGT override def opcodeIFICMP() = scala.tools.asm.Opcodes.IF_ICMPGT - } /** This class represents an arithmetic operation. */ - class ArithmeticOp { + class ArithmeticOp: /** Returns a string representation of this operation. */ - override def toString(): String = this match { + override def toString(): String = this match case ADD => "ADD" case SUB => "SUB" case MUL => "MUL" @@ -86,8 +79,6 @@ object Primitives { case REM => "REM" case NOT => "NOT" case _ => throw new RuntimeException("ArithmeticOp unknown case") - } - } /** An arithmetic addition operation */ case object ADD extends ArithmeticOp @@ -108,16 +99,14 @@ object Primitives { case object NOT extends ArithmeticOp /** This class represents a shift operation. */ - class ShiftOp { + class ShiftOp: /** Returns a string representation of this operation. */ - override def toString(): String = this match { + override def toString(): String = this match case LSL => "LSL" case ASR => "ASR" case LSR => "LSR" case _ => throw new RuntimeException("ShitOp unknown case") - } - } /** A logical shift to the left */ case object LSL extends ShiftOp @@ -129,16 +118,14 @@ object Primitives { case object LSR extends ShiftOp /** This class represents a logical operation. */ - class LogicalOp { + class LogicalOp: /** Returns a string representation of this operation. 
*/ - override def toString(): String = this match { + override def toString(): String = this match case AND => "AND" case OR => "OR" case XOR => "XOR" case _ => throw new RuntimeException("LogicalOp unknown case") - } - } /** A bitwise AND operation */ case object AND extends LogicalOp @@ -161,24 +148,20 @@ object Primitives { case object EndConcat extends Primitive /** Pretty printer for primitives */ - class PrimitivePrinter(out: PrintWriter) { - def print(s: String): PrimitivePrinter = { + class PrimitivePrinter(out: PrintWriter): + def print(s: String): PrimitivePrinter = out.print(s) this - } - } /** This class represents a comparison operation. */ - class ComparisonOp { + class ComparisonOp: /** Returns a string representation of this operation. */ - override def toString(): String = this match { + override def toString(): String = this match case CMPL => "CMPL" case CMP => "CMP" case CMPG => "CMPG" case _ => throw new RuntimeException("ComparisonOp unknown case") - } - } /** A comparison operation with -1 default for NaNs */ case object CMPL extends ComparisonOp @@ -188,4 +171,3 @@ object Primitives { /** A comparison operation with +1 default for NaNs */ case object CMPG extends ComparisonOp -} diff --git a/compiler/src/dotty/tools/backend/jvm/scalaPrimitives.scala b/compiler/src/dotty/tools/backend/jvm/scalaPrimitives.scala index bc453aec17af..498b766f0391 100644 --- a/compiler/src/dotty/tools/backend/jvm/scalaPrimitives.scala +++ b/compiler/src/dotty/tools/backend/jvm/scalaPrimitives.scala @@ -31,15 +31,14 @@ import scala.annotation.threadUnsafe * * Inspired from the `scalac` compiler. */ -class DottyPrimitives(ictx: Context) { +class DottyPrimitives(ictx: Context): import dotty.tools.backend.ScalaPrimitivesOps._ @threadUnsafe private lazy val primitives: ReadOnlyMap[Symbol, Int] = init /** Return the code for the given symbol. 
*/ - def getPrimitive(sym: Symbol): Int = { + def getPrimitive(sym: Symbol): Int = primitives(sym) - } /** * Return the primitive code of the given operation. If the @@ -50,10 +49,10 @@ class DottyPrimitives(ictx: Context) { * @param tpe The type of the receiver object. It is used only for array * operations */ - def getPrimitive(app: Apply, tpe: Type)(using Context): Int = { + def getPrimitive(app: Apply, tpe: Type)(using Context): Int = val fun = app.fun.symbol val defn = ctx.definitions - val code = app.fun match { + val code = app.fun match case Select(_, nme.primitive.arrayLength) => LENGTH case Select(_, nme.primitive.arrayUpdate) => @@ -61,20 +60,18 @@ class DottyPrimitives(ictx: Context) { case Select(_, nme.primitive.arrayApply) => APPLY case _ => getPrimitive(fun) - } - def elementType: Type = tpe.widenDealias match { + def elementType: Type = tpe.widenDealias match case defn.ArrayOf(el) => el case JavaArrayType(el) => el case _ => report.error(em"expected Array $tpe") UnspecifiedErrorType - } - code match { + code match case APPLY => - defn.scalaClassName(elementType) match { + defn.scalaClassName(elementType) match case tpnme.Boolean => ZARRAY_GET case tpnme.Byte => BARRAY_GET case tpnme.Short => SARRAY_GET @@ -84,10 +81,9 @@ class DottyPrimitives(ictx: Context) { case tpnme.Float => FARRAY_GET case tpnme.Double => DARRAY_GET case _ => OARRAY_GET - } case UPDATE => - defn.scalaClassName(elementType) match { + defn.scalaClassName(elementType) match case tpnme.Boolean => ZARRAY_SET case tpnme.Byte => BARRAY_SET case tpnme.Short => SARRAY_SET @@ -97,10 +93,9 @@ class DottyPrimitives(ictx: Context) { case tpnme.Float => FARRAY_SET case tpnme.Double => DARRAY_SET case _ => OARRAY_SET - } case LENGTH => - defn.scalaClassName(elementType) match { + defn.scalaClassName(elementType) match case tpnme.Boolean => ZARRAY_LENGTH case tpnme.Byte => BARRAY_LENGTH case tpnme.Short => SARRAY_LENGTH @@ -110,15 +105,12 @@ class DottyPrimitives(ictx: Context) { case 
tpnme.Float => FARRAY_LENGTH case tpnme.Double => DARRAY_LENGTH case _ => OARRAY_LENGTH - } case _ => code - } - } /** Initialize the primitive map */ - private def init: ReadOnlyMap[Symbol, Int] = { + private def init: ReadOnlyMap[Symbol, Int] = given Context = ictx @@ -126,12 +118,11 @@ class DottyPrimitives(ictx: Context) { val primitives = Symbols.MutableSymbolMap[Int](512) /** Add a primitive operation to the map */ - def addPrimitive(s: Symbol, code: Int): Unit = { + def addPrimitive(s: Symbol, code: Int): Unit = assert(!(primitives contains s), "Duplicate primitive " + s) primitives(s) = code - } - def addPrimitives(cls: Symbol, method: TermName, code: Int)(using Context): Unit = { + def addPrimitives(cls: Symbol, method: TermName, code: Int)(using Context): Unit = val alts = cls.info.member(method).alternatives.map(_.symbol) if (alts.isEmpty) report.error(em"Unknown primitive method $cls.$method") @@ -142,8 +133,7 @@ class DottyPrimitives(ictx: Context) { case _ => code } ) - ) - } + ) // scala.Any addPrimitive(defn.Any_==, EQ) @@ -395,7 +385,6 @@ class DottyPrimitives(ictx: Context) { primitives - } def isPrimitive(sym: Symbol): Boolean = primitives.contains(sym) @@ -409,4 +398,3 @@ class DottyPrimitives(ictx: Context) { case Select(_, StdNames.nme.clone_) => false // but array.clone is NOT a primitive op. case _ => true }) -} diff --git a/compiler/src/dotty/tools/backend/sjs/GenSJSIR.scala b/compiler/src/dotty/tools/backend/sjs/GenSJSIR.scala index 1579b4577933..e6289e281984 100644 --- a/compiler/src/dotty/tools/backend/sjs/GenSJSIR.scala +++ b/compiler/src/dotty/tools/backend/sjs/GenSJSIR.scala @@ -5,7 +5,7 @@ import Contexts._ import Phases._ /** Generates Scala.js IR files for the compilation unit. 
*/ -class GenSJSIR extends Phase { +class GenSJSIR extends Phase: override def phaseName: String = GenSJSIR.name @@ -16,7 +16,6 @@ class GenSJSIR extends Phase { def run(using Context): Unit = new JSCodeGen().run() -} object GenSJSIR: val name: String = "genSJSIR" diff --git a/compiler/src/dotty/tools/backend/sjs/JSCodeGen.scala b/compiler/src/dotty/tools/backend/sjs/JSCodeGen.scala index eee791852fde..874b1d438b64 100644 --- a/compiler/src/dotty/tools/backend/sjs/JSCodeGen.scala +++ b/compiler/src/dotty/tools/backend/sjs/JSCodeGen.scala @@ -53,7 +53,7 @@ import ScopedVar.withScopedVars * - `genMethod()` and similar methods generate the declarations of methods. * - `genStatOrExpr()` and everything else generate the bodies of methods. */ -class JSCodeGen()(using genCtx: Context) { +class JSCodeGen()(using genCtx: Context): import JSCodeGen._ import tpd._ @@ -83,7 +83,7 @@ class JSCodeGen()(using genCtx: Context) { private val contextualJSClassValue = new ScopedVar[Option[js.Tree]](None) /** Resets all of the scoped state in the context of `body`. 
*/ - private def resetAllScopedVars[T](body: => T): T = { + private def resetAllScopedVars[T](body: => T): T = withScopedVars( currentClassSym := null, currentMethodSym := null, @@ -91,48 +91,38 @@ class JSCodeGen()(using genCtx: Context) { thisLocalVarIdent := null, isModuleInitialized := null, undefinedDefaultParams := null - ) { + ): body - } - } - private def withPerMethodBodyState[A](methodSym: Symbol)(body: => A): A = { + private def withPerMethodBodyState[A](methodSym: Symbol)(body: => A): A = withScopedVars( currentMethodSym := methodSym, thisLocalVarIdent := None, isModuleInitialized := new ScopedVar.VarBox(false), undefinedDefaultParams := mutable.Set.empty, - ) { + ): body - } - } - private def acquireContextualJSClassValue[A](f: Option[js.Tree] => A): A = { + private def acquireContextualJSClassValue[A](f: Option[js.Tree] => A): A = val jsClassValue = contextualJSClassValue.get withScopedVars( contextualJSClassValue := None - ) { + ): f(jsClassValue) - } - } - def withNewLocalNameScope[A](body: => A): A = { - withScopedVars(localNames := new LocalNameGenerator) { + def withNewLocalNameScope[A](body: => A): A = + withScopedVars(localNames := new LocalNameGenerator): body - } - } /** Implicitly materializes the current local name generator. */ implicit def implicitLocalNames: LocalNameGenerator = localNames.get - def currentThisType: jstpe.Type = { - encodeClassType(currentClassSym) match { + def currentThisType: jstpe.Type = + encodeClassType(currentClassSym) match case tpe @ jstpe.ClassType(cls) => jstpe.BoxedClassToPrimType.getOrElse(cls, tpe) case tpe => tpe - } - } /** Returns a new fresh local identifier. 
*/ private def freshLocalIdent()(implicit pos: Position): js.LocalIdent = @@ -146,26 +136,22 @@ class JSCodeGen()(using genCtx: Context) { private def freshLocalIdent(base: TermName)(implicit pos: Position): js.LocalIdent = localNames.get.freshLocalIdent(base) - private def consumeLazilyGeneratedAnonClass(sym: Symbol): TypeDef = { + private def consumeLazilyGeneratedAnonClass(sym: Symbol): TypeDef = val typeDef = lazilyGeneratedAnonClasses.remove(sym) - if (typeDef == null) { + if (typeDef == null) throw new FatalError( i"Could not find tree for lazily generated anonymous class ${sym.fullName} at ${sym.sourcePos}") - } else { + else typeDef - } - } // Compilation unit -------------------------------------------------------- - def run(): Unit = { - try { + def run(): Unit = + try genCompilationUnit(ctx.compilationUnit) - } finally { + finally generatedClasses.clear() generatedStaticForwarderClasses.clear() - } - } /** Generates the Scala.js IR for a compilation unit * This method iterates over all the class and interface definitions @@ -186,15 +172,13 @@ class JSCodeGen()(using genCtx: Context) { * * Interface -> `genInterface()` * * Normal class -> `genClass()` */ - private def genCompilationUnit(cunit: CompilationUnit): Unit = { - def collectTypeDefs(tree: Tree): List[TypeDef] = { - tree match { + private def genCompilationUnit(cunit: CompilationUnit): Unit = + def collectTypeDefs(tree: Tree): List[TypeDef] = + tree match case EmptyTree => Nil case PackageDef(_, stats) => stats.flatMap(collectTypeDefs) case cd: TypeDef => cd :: Nil case _: ValDef => Nil // module instance - } - } val allTypeDefs = collectTypeDefs(cunit.tpdTree) /* #13221 Set JavaStatic on all the Module fields of static module classes. @@ -205,14 +189,12 @@ class JSCodeGen()(using genCtx: Context) { * * However, here we only do this for Module fields, not all fields. 
*/ - for (typeDef <- allTypeDefs) { - if (typeDef.symbol.is(ModuleClass)) { + for (typeDef <- allTypeDefs) + if (typeDef.symbol.is(ModuleClass)) typeDef.symbol.info.decls.foreach { f => if (f.isField && f.is(Module)) f.setFlag(JavaStatic) } - } - } val (anonJSClassTypeDefs, otherTypeDefs) = allTypeDefs.partition(td => td.symbol.isAnonymousClass && td.symbol.isJSType) @@ -222,7 +204,7 @@ class JSCodeGen()(using genCtx: Context) { lazilyGeneratedAnonClasses(td.symbol) = td /* Finally, we emit true code for the remaining class defs. */ - for (td <- otherTypeDefs) { + for (td <- otherTypeDefs) val sym = td.symbol implicit val pos: Position = sym.span @@ -230,30 +212,26 @@ class JSCodeGen()(using genCtx: Context) { val isPrimitive = sym.isPrimitiveValueClass || sym == defn.ArrayClass - if (!isPrimitive) { + if (!isPrimitive) withScopedVars( currentClassSym := sym - ) { - val tree = if (sym.isJSType) { + ): + val tree = if (sym.isJSType) if (!sym.is(Trait) && sym.isNonNativeJSClass) genNonNativeJSClass(td) else genRawJSClassData(td) - } else if (sym.is(Trait)) { + else if (sym.is(Trait)) genInterface(td) - } else { + else genScalaClass(td) - } generatedClasses += tree - } - } - } for (tree <- generatedClasses) genIRFile(cunit, tree) - if (generatedStaticForwarderClasses.nonEmpty) { + if (generatedStaticForwarderClasses.nonEmpty) /* #4148 Add generated static forwarder classes, except those that * would collide with regular classes on case insensitive file systems. 
*/ @@ -274,43 +252,36 @@ class JSCodeGen()(using genCtx: Context) { val generatedCaseInsensitiveNames = generatedClasses.map(caseInsensitiveNameOf).toSet - for ((site, classDef) <- generatedStaticForwarderClasses) { - if (!generatedCaseInsensitiveNames.contains(caseInsensitiveNameOf(classDef))) { + for ((site, classDef) <- generatedStaticForwarderClasses) + if (!generatedCaseInsensitiveNames.contains(caseInsensitiveNameOf(classDef))) genIRFile(cunit, classDef) - } else { + else report.warning( s"Not generating the static forwarders of ${classDef.name.name.nameString} " + "because its name differs only in case from the name of another class or trait in this compilation unit.", site.srcPos) - } - } - } - } - private def genIRFile(cunit: CompilationUnit, tree: ir.Trees.ClassDef): Unit = { + private def genIRFile(cunit: CompilationUnit, tree: ir.Trees.ClassDef): Unit = val outfile = getFileFor(cunit, tree.name.name, ".sjsir") val output = outfile.bufferedOutput - try { + try ir.Serializers.serialize(output, tree) - } finally { + finally output.close() - } - } private def getFileFor(cunit: CompilationUnit, className: ClassName, - suffix: String): dotty.tools.io.AbstractFile = { + suffix: String): dotty.tools.io.AbstractFile = val outputDirectory = ctx.settings.outputDir.value val pathParts = className.nameString.split('.') val dir = pathParts.init.foldLeft(outputDirectory)(_.subdirectoryNamed(_)) val filename = pathParts.last dir.fileNamed(filename + suffix) - } // Generate a class -------------------------------------------------------- /** Gen the IR ClassDef for a Scala class definition (maybe a module class). 
*/ - private def genScalaClass(td: TypeDef): js.ClassDef = { + private def genScalaClass(td: TypeDef): js.ClassDef = val sym = td.symbol.asClass implicit val pos: SourcePosition = sym.sourcePos @@ -318,12 +289,11 @@ class JSCodeGen()(using genCtx: Context) { "genScalaClass() must be called only for normal classes: "+sym) assert(sym.superClass != NoSymbol, sym) - if (hasDefaultCtorArgsAndJSModule(sym)) { + if (hasDefaultCtorArgsAndJSModule(sym)) report.error( "Implementation restriction: " + "constructors of Scala classes cannot have default parameters if their companion module is JS native.", td) - } val classIdent = encodeClassNameIdent(sym) val originalName = originalNameOfClass(sym) @@ -333,11 +303,10 @@ class JSCodeGen()(using genCtx: Context) { val isDynamicImportThunk = sym.isSubClass(jsdefn.DynamicImportThunkClass) - def isStdLibClassWithAdHocInlineAnnot(sym: Symbol): Boolean = { + def isStdLibClassWithAdHocInlineAnnot(sym: Symbol): Boolean = val fullName = sym.fullName.toString (fullName.startsWith("scala.Tuple") && !fullName.endsWith("$")) || (fullName.startsWith("scala.collection.mutable.ArrayOps$of")) - } val shouldMarkInline = ( isDynamicImportThunk || @@ -345,19 +314,18 @@ class JSCodeGen()(using genCtx: Context) { (sym.isAnonymousFunction && !sym.isSubClass(defn.PartialFunctionClass)) || isStdLibClassWithAdHocInlineAnnot(sym)) - val optimizerHints = { + val optimizerHints = OptimizerHints.empty .withInline(shouldMarkInline) .withNoinline(sym.hasAnnotation(jsdefn.NoinlineAnnot)) - } // Generate members (constructor + methods) val generatedNonFieldMembers = new mutable.ListBuffer[js.MemberDef] val tpl = td.rhs.asInstanceOf[Template] - for (tree <- tpl.constr :: tpl.body) { - tree match { + for (tree <- tpl.constr :: tpl.body) + tree match case EmptyTree => () case vd: ValDef => @@ -376,8 +344,6 @@ class JSCodeGen()(using genCtx: Context) { case _ => throw new FatalError("Illegal tree in body of genScalaClass(): " + tree) - } - } // Generate fields and 
add to methods + ctors val generatedMembers = genClassFields(td) ++ generatedNonFieldMembers.toList @@ -389,19 +355,17 @@ class JSCodeGen()(using genCtx: Context) { val topLevelExportDefs = jsExportsGen.genTopLevelExports(sym) // Static initializer - val optStaticInitializer = { + val optStaticInitializer = // Initialization of reflection data, if required - val reflectInit = { - val enableReflectiveInstantiation = { + val reflectInit = + val enableReflectiveInstantiation = sym.baseClasses.exists { ancestor => ancestor.hasAnnotation(jsdefn.EnableReflectiveInstantiationAnnot) } - } if (enableReflectiveInstantiation) genRegisterReflectiveInstantiation(sym).toList else Nil - } // Initialization of the module because of field exports val needsStaticModuleInit = @@ -415,7 +379,6 @@ class JSCodeGen()(using genCtx: Context) { List(genStaticConstructorWithStats(ir.Names.StaticInitializerName, js.Block(staticInitializerStats))) else Nil - } val optDynamicImportForwarder = if (isDynamicImportThunk) List(genDynamicImportForwarder(sym)) @@ -425,17 +388,17 @@ class JSCodeGen()(using genCtx: Context) { generatedMembers ::: memberExports ::: optStaticInitializer ::: optDynamicImportForwarder // Add static forwarders - val allMemberDefs = if (!isCandidateForForwarders(sym)) { + val allMemberDefs = if (!isCandidateForForwarders(sym)) allMemberDefsExceptStaticForwarders - } else { - if (isStaticModule(sym)) { + else + if (isStaticModule(sym)) /* If the module class has no linked class, we must create one to * hold the static forwarders. Otherwise, this is going to be handled * when generating the companion class. 
*/ - if (!sym.linkedClass.exists) { + if (!sym.linkedClass.exists) val forwarders = genStaticForwardersFromModuleClass(Nil, sym) - if (forwarders.nonEmpty) { + if (forwarders.nonEmpty) val forwardersClassDef = js.ClassDef( js.ClassIdent(ClassName(classIdent.name.nameString.stripSuffix("$"))), originalName, @@ -449,15 +412,11 @@ class JSCodeGen()(using genCtx: Context) { Nil )(js.OptimizerHints.empty) generatedStaticForwarderClasses += sym -> forwardersClassDef - } - } allMemberDefsExceptStaticForwarders - } else { + else val forwarders = genStaticForwardersForClassOrInterface( allMemberDefsExceptStaticForwarders, sym) allMemberDefsExceptStaticForwarders ::: forwarders - } - } // Hashed definitions of the class val hashedDefs = ir.Hashers.hashMemberDefs(allMemberDefs) @@ -482,10 +441,9 @@ class JSCodeGen()(using genCtx: Context) { optimizerHints) classDefinition - } /** Gen the IR ClassDef for a Scala.js-defined JS class. */ - private def genNonNativeJSClass(td: TypeDef): js.ClassDef = { + private def genNonNativeJSClass(td: TypeDef): js.ClassDef = val sym = td.symbol.asClass implicit val pos: SourcePosition = sym.sourcePos @@ -493,12 +451,11 @@ class JSCodeGen()(using genCtx: Context) { i"genNonNativeJSClass() must be called only for non-native JS classes: $sym") assert(sym.superClass != NoSymbol, sym) - if (hasDefaultCtorArgsAndJSModule(sym)) { + if (hasDefaultCtorArgsAndJSModule(sym)) report.error( "Implementation restriction: " + "constructors of non-native JS classes cannot have default parameters if their companion module is JS native.", td) - } val classIdent = encodeClassNameIdent(sym) val originalName = originalNameOfClass(sym) @@ -510,8 +467,8 @@ class JSCodeGen()(using genCtx: Context) { val dispatchMethodNames = new mutable.ListBuffer[JSName] val tpl = td.rhs.asInstanceOf[Template] - for (tree <- tpl.constr :: tpl.body) { - tree match { + for (tree <- tpl.constr :: tpl.body) + tree match case EmptyTree => () case _: ValDef => @@ -521,52 +478,44 @@ class 
JSCodeGen()(using genCtx: Context) { val sym = dd.symbol val exposed = sym.isJSExposed - if (sym.isClassConstructor) { + if (sym.isClassConstructor) constructorTrees += dd - } else if (exposed && sym.is(Accessor, butNot = Lazy)) { + else if (exposed && sym.is(Accessor, butNot = Lazy)) { // Exposed accessors must not be emitted, since the field they access is enough. } else if (sym.hasAnnotation(jsdefn.JSOptionalAnnot)) { // Optional methods must not be emitted - } else { + } else generatedMethods ++= genMethod(dd) // Collect the names of the dispatchers we have to create - if (exposed && !sym.is(Deferred)) { + if (exposed && !sym.is(Deferred)) /* We add symbols that we have to expose here. This way we also * get inherited stuff that is implemented in this class. */ dispatchMethodNames += sym.jsName - } - } case _ => throw new FatalError("Illegal tree in gen of genNonNativeJSClass(): " + tree) - } - } // Static members (exported from the companion object) - val staticMembers = { + val staticMembers = val module = sym.companionModule - if (!module.exists) { + if (!module.exists) Nil - } else { + else val companionModuleClass = module.moduleClass - val exports = withScopedVars(currentClassSym := companionModuleClass) { + val exports = withScopedVars(currentClassSym := companionModuleClass): jsExportsGen.genStaticExports(companionModuleClass) - } - if (exports.exists(_.isInstanceOf[js.JSFieldDef])) { + if (exports.exists(_.isInstanceOf[js.JSFieldDef])) val classInitializer = genStaticConstructorWithStats(ir.Names.ClassInitializerName, genLoadModule(companionModuleClass)) exports :+ classInitializer - } else { + else exports - } - } - } val topLevelExports = jsExportsGen.genTopLevelExports(sym) - val (generatedConstructor, jsClassCaptures) = withNewLocalNameScope { + val (generatedConstructor, jsClassCaptures) = withNewLocalNameScope: val isNested = sym.isNestedJSClass if (isNested) @@ -574,26 +523,23 @@ class JSCodeGen()(using genCtx: Context) { val (captures, ctor) = 
genJSClassCapturesAndConstructor(constructorTrees.toList) - val jsClassCaptures = if (isNested) { + val jsClassCaptures = if (isNested) val superParam = js.ParamDef(js.LocalIdent(JSSuperClassParamName), NoOriginalName, jstpe.AnyType, mutable = false) Some(superParam :: captures) - } else { + else assert(captures.isEmpty, s"found non nested JS class with captures $captures at $pos") None - } (ctor, jsClassCaptures) - } // Generate fields (and add to methods + ctors) - val generatedMembers = { + val generatedMembers = genClassFields(td) ::: generatedConstructor :: jsExportsGen.genJSClassDispatchers(sym, dispatchMethodNames.result().distinct) ::: generatedMethods.toList ::: staticMembers - } // Hashed definitions of the class val hashedMemberDefs = ir.Hashers.hashMemberDefs(generatedMembers) @@ -617,20 +563,18 @@ class JSCodeGen()(using genCtx: Context) { OptimizerHints.empty) classDefinition - } /** Gen the IR ClassDef for a raw JS class or trait. */ - private def genRawJSClassData(td: TypeDef): js.ClassDef = { + private def genRawJSClassData(td: TypeDef): js.ClassDef = val sym = td.symbol.asClass implicit val pos: Position = sym.span val classIdent = encodeClassNameIdent(sym) - val kind = { + val kind = if (sym.is(Trait)) ClassKind.AbstractJSType else if (sym.is(ModuleClass)) ClassKind.NativeJSModuleClass else ClassKind.NativeJSClass - } val superClass = if (sym.is(Trait)) None else Some(encodeClassNameIdent(sym.superClass)) @@ -648,11 +592,10 @@ class JSCodeGen()(using genCtx: Context) { Nil, Nil)( OptimizerHints.empty) - } /** Gen the IR ClassDef for an interface definition. 
*/ - private def genInterface(td: TypeDef): js.ClassDef = { + private def genInterface(td: TypeDef): js.ClassDef = val sym = td.symbol.asClass implicit val pos: SourcePosition = sym.sourcePos @@ -661,8 +604,8 @@ class JSCodeGen()(using genCtx: Context) { val generatedMethods = new mutable.ListBuffer[js.MethodDef] val tpl = td.rhs.asInstanceOf[Template] - for (tree <- tpl.constr :: tpl.body) { - tree match { + for (tree <- tpl.constr :: tpl.body) + tree match case EmptyTree => () case dd: DefDef => generatedMethods ++= genMethod(dd) case _ => @@ -670,8 +613,6 @@ class JSCodeGen()(using genCtx: Context) { i"""Illegal tree in gen of genInterface(): $tree |class = $td |in ${ctx.compilationUnit}""") - } - } val superInterfaces = genClassInterfaces(sym, forJSClass = false) @@ -695,17 +636,14 @@ class JSCodeGen()(using genCtx: Context) { hashedDefs, Nil)( OptimizerHints.empty) - } private def genClassInterfaces(sym: ClassSymbol, forJSClass: Boolean)( - implicit pos: Position): List[js.ClassIdent] = { + implicit pos: Position): List[js.ClassIdent] = for { intf <- sym.directlyInheritedTraits if !(forJSClass && intf == defn.DynamicClass) - } yield { + } yield encodeClassNameIdent(intf) - } - } // Static forwarders ------------------------------------------------------- @@ -743,15 +681,11 @@ class JSCodeGen()(using genCtx: Context) { * Other than the Scala.js-specific flag, and the fact that we also consider * interfaces, this performs the same tests as the JVM back-end. 
*/ - def isCandidateForForwarders(sym: Symbol): Boolean = { - !ctx.settings.XnoForwarders.value && sym.isStatic && { - ctx.settings.scalajsGenStaticForwardersForNonTopLevelObjects.value || { - atPhase(flattenPhase) { + def isCandidateForForwarders(sym: Symbol): Boolean = + !ctx.settings.XnoForwarders.value && sym.isStatic `&&`: + ctx.settings.scalajsGenStaticForwardersForNonTopLevelObjects.value `||`: + atPhase(flattenPhase): toDenot(sym).owner.is(PackageClass) - } - } - } - } /** Gen the static forwarders to the members of a class or interface for * methods of its companion object. @@ -763,18 +697,16 @@ class JSCodeGen()(using genCtx: Context) { */ def genStaticForwardersForClassOrInterface( existingMembers: List[js.MemberDef], sym: Symbol)( - implicit pos: SourcePosition): List[js.MemberDef] = { + implicit pos: SourcePosition): List[js.MemberDef] = val module = sym.companionModule - if (!module.exists) { + if (!module.exists) Nil - } else { + else val moduleClass = module.moduleClass if (!moduleClass.isJSType) genStaticForwardersFromModuleClass(existingMembers, moduleClass) else Nil - } - } /** Gen the static forwarders for the methods of a module class. 
* @@ -782,7 +714,7 @@ class JSCodeGen()(using genCtx: Context) { */ def genStaticForwardersFromModuleClass(existingMembers: List[js.MemberDef], moduleClass: Symbol)( - implicit pos: SourcePosition): List[js.MemberDef] = { + implicit pos: SourcePosition): List[js.MemberDef] = assert(moduleClass.is(ModuleClass), moduleClass) @@ -792,47 +724,42 @@ class JSCodeGen()(using genCtx: Context) { name.name }.toSet - val members = { + val members = moduleClass.info.membersBasedOnFlags(required = Flags.Method, excluded = Flags.ExcludedForwarder).map(_.symbol) - } - def isExcluded(m: Symbol): Boolean = { + def isExcluded(m: Symbol): Boolean = def hasAccessBoundary = m.accessBoundary(defn.RootClass) ne defn.RootClass def isOfJLObject: Boolean = m.owner eq defn.ObjectClass - def isDefaultParamOfJSNativeDef: Boolean = { - m.name.is(DefaultGetterName) && { + def isDefaultParamOfJSNativeDef: Boolean = + m.name.is(DefaultGetterName) `&&`: val info = new DefaultParamInfo(m) !info.isForConstructor && info.attachedMethod.hasAnnotation(jsdefn.JSNativeAnnot) - } - } m.is(Deferred) || m.isConstructor || hasAccessBoundary || isOfJLObject || m.hasAnnotation(jsdefn.JSNativeAnnot) || isDefaultParamOfJSNativeDef // #4557 - } val forwarders = for { m <- members if !isExcluded(m) - } yield { - withNewLocalNameScope { + } yield + withNewLocalNameScope: val flags = js.MemberFlags.empty.withNamespace(js.MemberNamespace.PublicStatic) val methodIdent = encodeMethodSym(m) val originalName = originalNameOfMethod(m) val jsParams = for { (paramName, paramInfo) <- m.info.paramNamess.flatten.zip(m.info.paramInfoss.flatten) - } yield { + } yield js.ParamDef(freshLocalIdent(paramName), NoOriginalName, toIRType(paramInfo), mutable = false) - } val resultType = toIRType(m.info.resultType) - if (existingPublicStaticMethodNames.contains(methodIdent.name)) { + if (existingPublicStaticMethodNames.contains(methodIdent.name)) report.error( "Unexpected situation: found existing public static method " + 
s"${methodIdent.name.nameString} in the companion class of " + @@ -840,21 +767,17 @@ class JSCodeGen()(using genCtx: Context) { "the method of the same name in the object." + "Please report this as a bug in the Scala.js support in dotty.", pos) - } js.MethodDef(flags, methodIdent, originalName, jsParams, resultType, Some { genApplyMethod(genLoadModule(moduleClass), m, jsParams.map(_.ref)) })(OptimizerHints.empty, None) - } - } forwarders.toList - } // Generate the fields of a class ------------------------------------------ /** Gen definitions for the fields of a class. */ - private def genClassFields(td: TypeDef): List[js.MemberDef] = { + private def genClassFields(td: TypeDef): List[js.MemberDef] = val classSym = td.symbol.asClass assert(currentClassSym.get == classSym, "genClassFields called with a ClassDef other than the current one") @@ -914,11 +837,10 @@ class JSCodeGen()(using genCtx: Context) { Nil fieldDef :: optionalStaticFieldGetter }).toList - } - def genExposedFieldIRType(f: Symbol): jstpe.Type = { + def genExposedFieldIRType(f: Symbol): jstpe.Type = val tpeEnteringPosterasure = atPhase(elimErasedValueTypePhase)(f.info) - tpeEnteringPosterasure match { + tpeEnteringPosterasure match case tpe: ErasedValueType => /* Here, we must store the field as the boxed representation of * the value class. The default value of that field, as @@ -937,13 +859,11 @@ class JSCodeGen()(using genCtx: Context) { case _ => // Other types are not boxed, so we can initialized them to their true zero. 
toIRType(f.info) - } - } // Static initializers ----------------------------------------------------- private def genStaticConstructorWithStats(name: MethodName, stats: js.Tree)( - implicit pos: Position): js.MethodDef = { + implicit pos: Position): js.MethodDef = js.MethodDef( js.MemberFlags.empty.withNamespace(js.MemberNamespace.StaticConstructor), js.MethodIdent(name), @@ -952,10 +872,9 @@ class JSCodeGen()(using genCtx: Context) { jstpe.NoType, Some(stats))( OptimizerHints.empty, None) - } private def genRegisterReflectiveInstantiation(sym: Symbol)( - implicit pos: SourcePosition): Option[js.Tree] = { + implicit pos: SourcePosition): Option[js.Tree] = if (isStaticModule(sym)) genRegisterReflectiveInstantiationForModuleClass(sym) else if (sym.is(ModuleClass)) @@ -964,10 +883,9 @@ class JSCodeGen()(using genCtx: Context) { None // scala-js#3227 else genRegisterReflectiveInstantiationForNormalClass(sym) - } private def genRegisterReflectiveInstantiationForModuleClass(sym: Symbol)( - implicit pos: SourcePosition): Option[js.Tree] = { + implicit pos: SourcePosition): Option[js.Tree] = val fqcnArg = js.StringLiteral(sym.fullName.toString) val runtimeClassArg = js.ClassOf(toTypeRef(sym.info)) val loadModuleFunArg = @@ -979,21 +897,20 @@ class JSCodeGen()(using genCtx: Context) { List(fqcnArg, runtimeClassArg, loadModuleFunArg)) Some(stat) - } private def genRegisterReflectiveInstantiationForNormalClass(sym: Symbol)( - implicit pos: SourcePosition): Option[js.Tree] = { + implicit pos: SourcePosition): Option[js.Tree] = val ctors = if (sym.is(Abstract)) Nil else sym.info.member(nme.CONSTRUCTOR).alternatives.map(_.symbol).filter(m => !m.isOneOf(Private | Protected)) - if (ctors.isEmpty) { + if (ctors.isEmpty) None - } else { + else val constructorsInfos = for { ctor <- ctors - } yield { - withNewLocalNameScope { + } yield + withNewLocalNameScope: val (parameterTypes, formalParams, actualParams) = (for { (paramName, paramInfo) <- 
ctor.info.paramNamess.flatten.zip(ctor.info.paramInfoss.flatten) } yield { @@ -1011,8 +928,6 @@ class JSCodeGen()(using genCtx: Context) { }, Nil) js.JSArrayConstr(List(paramTypesArray, newInstanceFun)) - } - } val fqcnArg = js.StringLiteral(sym.fullName.toString) val runtimeClassArg = js.ClassOf(toTypeRef(sym.info)) @@ -1024,13 +939,11 @@ class JSCodeGen()(using genCtx: Context) { List(fqcnArg, runtimeClassArg, ctorsInfosArg)) Some(stat) - } - } // Constructor of a non-native JS class ------------------------------------ def genJSClassCapturesAndConstructor(constructorTrees: List[DefDef])( - implicit pos: SourcePosition): (List[js.ParamDef], js.JSConstructorDef) = { + implicit pos: SourcePosition): (List[js.ParamDef], js.JSConstructorDef) = /* We need to merge all Scala constructors into a single one because the * IR, like JavaScript, only allows a single one. * @@ -1050,16 +963,15 @@ class JSCodeGen()(using genCtx: Context) { // VarDefs for the parameters of all constructors. val paramVarDefs = for { vparam <- constructorTrees.flatMap(_.paramss.flatten) - } yield { + } yield val sym = vparam.symbol val tpe = toIRType(sym.info) js.VarDef(encodeLocalSym(sym), originalNameOfLocal(sym), tpe, mutable = true, jstpe.zeroOf(tpe))(vparam.span) - } /* organize constructors in a called-by tree * (the implicit root is the primary constructor) */ - val ctorTree = { + val ctorTree = val ctorToChildren = secondaryCtors .groupBy(_.targetCtor) .withDefaultValue(Nil) @@ -1072,35 +984,31 @@ class JSCodeGen()(using genCtx: Context) { * conditional based on the chosen overload. */ var nextOverloadNum = 0 - def subTree[T <: JSCtor](ctor: T): ConstructorTree[T] = { + def subTree[T <: JSCtor](ctor: T): ConstructorTree[T] = val overloadNum = nextOverloadNum nextOverloadNum += 1 val subtrees = ctorToChildren(ctor.sym).map(subTree(_)) new ConstructorTree(overloadNum, ctor, subtrees) - } subTree(primaryCtor) - } /* prepare overload dispatch for all constructors. 
* as a side-product, we retrieve the capture parameters. */ - val (exports, jsClassCaptures) = { + val (exports, jsClassCaptures) = val exports = List.newBuilder[jsExportsGen.Exported] val jsClassCaptures = List.newBuilder[js.ParamDef] - def add(tree: ConstructorTree[_ <: JSCtor]): Unit = { + def add(tree: ConstructorTree[_ <: JSCtor]): Unit = val (e, c) = genJSClassCtorDispatch(tree.ctor.sym, tree.ctor.paramsAndInfo, tree.overloadNum) exports += e jsClassCaptures ++= c tree.subCtors.foreach(add(_)) - } add(ctorTree) (exports.result(), jsClassCaptures.result()) - } // The name 'constructor' is used for error reporting here val (formalArgs, restParam, overloadDispatchBody) = @@ -1120,9 +1028,8 @@ class JSCodeGen()(using genCtx: Context) { formalArgs, restParam, constructorBody)(OptimizerHints.empty, None) (jsClassCaptures, constructorDef) - } - private def genPrimaryJSClassCtor(dd: DefDef): PrimaryJSCtor = { + private def genPrimaryJSClassCtor(dd: DefDef): PrimaryJSCtor = val sym = dd.symbol val Block(stats, _) = dd.rhs: @unchecked assert(sym.isPrimaryConstructor, s"called with non-primary ctor: $sym") @@ -1138,8 +1045,8 @@ class JSCodeGen()(using genCtx: Context) { * params). We move those after the super constructor call, and are * therefore executed later than for a Scala class. 
*/ - withPerMethodBodyState(sym) { - stats.foreach { + withPerMethodBodyState(sym): + stats.foreach: case tree @ Apply(fun @ Select(Super(This(_), _), _), args) if fun.symbol.isClassConstructor => assert(jsSuperCall.isEmpty, s"Found 2 JS Super calls at ${dd.sourcePos}") @@ -1152,32 +1059,28 @@ class JSCodeGen()(using genCtx: Context) { "Trying to move a local VarDef after the super constructor call of a non-native JS class at " + dd.sourcePos) jsStats += jsStat - } - } assert(jsSuperCall.isDefined, s"Did not find Super call in primary JS construtor at ${dd.sourcePos}") new PrimaryJSCtor(sym, genParamsAndInfo(sym, dd.paramss), js.JSConstructorBody(Nil, jsSuperCall.get, jsStats.result())(dd.span)) - } - private def genSecondaryJSClassCtor(dd: DefDef): SplitSecondaryJSCtor = { + private def genSecondaryJSClassCtor(dd: DefDef): SplitSecondaryJSCtor = val sym = dd.symbol assert(!sym.isPrimaryConstructor, s"called with primary ctor $sym") - def flattenBlocks(t: Tree): List[Tree] = t match { + def flattenBlocks(t: Tree): List[Tree] = t match case Block(stats, expr) => (stats :+ expr).flatMap(flattenBlocks) case _ => t :: Nil - } val stats = flattenBlocks(dd.rhs) val beforeThisCall = List.newBuilder[js.Tree] var thisCall: Option[(Symbol, List[js.Tree])] = None val afterThisCall = List.newBuilder[js.Tree] - withPerMethodBodyState(sym) { - stats.foreach { + withPerMethodBodyState(sym): + stats.foreach: case tree @ Apply(fun @ Select(This(_), _), args) if fun.symbol.isClassConstructor => assert(thisCall.isEmpty, @@ -1193,8 +1096,6 @@ class JSCodeGen()(using genCtx: Context) { beforeThisCall += jsStat else afterThisCall += jsStat - } - } assert(thisCall.isDefined, i"could not find the this() call in secondary JS constructor at ${dd.sourcePos}:\n${stats.map(_.show).mkString("\n")}") @@ -1202,19 +1103,17 @@ class JSCodeGen()(using genCtx: Context) { new SplitSecondaryJSCtor(sym, genParamsAndInfo(sym, dd.paramss), beforeThisCall.result(), targetCtor, ctorArgs, 
afterThisCall.result()) - } private def genParamsAndInfo(ctorSym: Symbol, - vparamss: List[ParamClause]): List[(Symbol, JSParamInfo)] = { + vparamss: List[ParamClause]): List[(Symbol, JSParamInfo)] = implicit val pos: SourcePosition = ctorSym.sourcePos val paramSyms = if (vparamss.isEmpty) Nil else vparamss.head.map(_.symbol) paramSyms.zip(ctorSym.jsParamInfos) - } private def genJSClassCtorDispatch(ctorSym: Symbol, allParamsAndInfos: List[(Symbol, JSParamInfo)], - overloadNum: Int): (jsExportsGen.Exported, List[js.ParamDef]) = { + overloadNum: Int): (jsExportsGen.Exported, List[js.ParamDef]) = implicit val pos: SourcePosition = ctorSym.sourcePos @@ -1241,28 +1140,24 @@ class JSCodeGen()(using genCtx: Context) { val normalInfos = normalParamsAndInfos.map(_._2).toIndexedSeq - val jsExport = new jsExportsGen.Exported(ctorSym, normalInfos) { - def genBody(formalArgsRegistry: jsExportsGen.FormalArgsRegistry): js.Tree = { + val jsExport = new jsExportsGen.Exported(ctorSym, normalInfos): + def genBody(formalArgsRegistry: jsExportsGen.FormalArgsRegistry): js.Tree = val paramAssigns = for { ((param, info), i) <- normalParamsAndInfos.zipWithIndex - } yield { + } yield val rhs = jsExportsGen.genScalaArg(this, i, formalArgsRegistry, info, static = true, captures = captureParamsAndInfos.map(pi => genVarRef(pi._1)))( prevArgsCount => normalParamsAndInfos.take(prevArgsCount).map(pi => genVarRef(pi._1))) js.Assign(genVarRef(param), rhs) - } js.Block(captureAssigns ::: paramAssigns, js.IntLiteral(overloadNum)) - } - } (jsExport, jsClassCaptures) - } /** Generates a JS constructor body based on a constructor tree. 
*/ private def genJSClassCtorBody(overloadVar: js.VarRef, - ctorTree: ConstructorTree[PrimaryJSCtor])(implicit pos: Position): js.JSConstructorBody = { + ctorTree: ConstructorTree[PrimaryJSCtor])(implicit pos: Position): js.JSConstructorBody = /* generates a statement that conditionally executes body iff the chosen * overload is any of the descendants of `tree` (including itself). @@ -1270,32 +1165,29 @@ class JSCodeGen()(using genCtx: Context) { * here we use the property from building the trees, that a set of * descendants always has a range of overload numbers. */ - def ifOverload(tree: ConstructorTree[_], body: js.Tree): js.Tree = body match { + def ifOverload(tree: ConstructorTree[_], body: js.Tree): js.Tree = body match case js.Skip() => js.Skip() case body => val x = overloadVar - val cond = { + val cond = import tree.{lo, hi} - if (lo == hi) { + if (lo == hi) js.BinaryOp(js.BinaryOp.Int_==, js.IntLiteral(lo), x) - } else { + else val lhs = js.BinaryOp(js.BinaryOp.Int_<=, js.IntLiteral(lo), x) val rhs = js.BinaryOp(js.BinaryOp.Int_<=, x, js.IntLiteral(hi)) js.If(lhs, rhs, js.BooleanLiteral(false))(jstpe.BooleanType) - } - } js.If(cond, body, js.Skip())(jstpe.NoType) - } /* preStats / postStats use pre/post order traversal respectively to * generate a topo-sorted sequence of statements. 
*/ def preStats(tree: ConstructorTree[SplitSecondaryJSCtor], - nextParamsAndInfo: List[(Symbol, JSParamInfo)]): js.Tree = { + nextParamsAndInfo: List[(Symbol, JSParamInfo)]): js.Tree = val inner = tree.subCtors.map(preStats(_, tree.ctor.paramsAndInfo)) assert(tree.ctor.ctorArgs.size == nextParamsAndInfo.size, "param count mismatch") @@ -1306,14 +1198,13 @@ class JSCodeGen()(using genCtx: Context) { val captureAssigns = for { ((param, _), arg) <- captureParamsInfosAndArgs - } yield { + } yield js.Assign(genVarRef(param), arg) - } val normalAssigns = for { (((param, info), arg), i) <- normalParamsInfosAndArgs.zipWithIndex - } yield { - val newArg = arg match { + } yield + val newArg = arg match case js.Transient(UndefinedParam) => /* Go full circle: We have ignored the default param getter for * this, we'll create it again. @@ -1340,19 +1231,15 @@ class JSCodeGen()(using genCtx: Context) { prevArgsCount => normalParamsInfosAndArgs.take(prevArgsCount).map(p => genVarRef(p._1._1))) case arg => arg - } js.Assign(genVarRef(param), newArg) - } ifOverload(tree, js.Block( inner ++ tree.ctor.beforeCall ++ captureAssigns ++ normalAssigns)) - } - def postStats(tree: ConstructorTree[SplitSecondaryJSCtor]): js.Tree = { + def postStats(tree: ConstructorTree[SplitSecondaryJSCtor]): js.Tree = val inner = tree.subCtors.map(postStats(_)) ifOverload(tree, js.Block(tree.ctor.afterCall ++ inner)) - } val primaryCtor = ctorTree.ctor val secondaryCtorTrees = ctorTree.subCtors @@ -1362,18 +1249,15 @@ class JSCodeGen()(using genCtx: Context) { primaryCtor.body, secondaryCtorTrees.map(postStats(_)) ) - } private def wrapJSCtorBody(before: List[js.Tree], body: js.JSConstructorBody, - after: List[js.Tree]): js.JSConstructorBody = { + after: List[js.Tree]): js.JSConstructorBody = js.JSConstructorBody(before ::: body.beforeSuper, body.superCall, body.afterSuper ::: after)(body.pos) - } - private sealed trait JSCtor { + private sealed trait JSCtor: val sym: Symbol val paramsAndInfo: List[(Symbol, 
JSParamInfo)] - } private class PrimaryJSCtor(val sym: Symbol, val paramsAndInfo: List[(Symbol, JSParamInfo)], @@ -1387,17 +1271,16 @@ class JSCodeGen()(using genCtx: Context) { private class ConstructorTree[Ctor <: JSCtor]( val overloadNum: Int, val ctor: Ctor, - val subCtors: List[ConstructorTree[SplitSecondaryJSCtor]]) { + val subCtors: List[ConstructorTree[SplitSecondaryJSCtor]]): val lo: Int = overloadNum val hi: Int = subCtors.lastOption.fold(lo)(_.hi) assert(lo <= hi, "bad overload range") - } // Generate a method ------------------------------------------------------- /** Generates the JSNativeMemberDef. */ - def genJSNativeMemberDef(tree: ValOrDefDef): js.JSNativeMemberDef = { + def genJSNativeMemberDef(tree: ValOrDefDef): js.JSNativeMemberDef = implicit val pos = tree.span val sym = tree.symbol @@ -1405,15 +1288,12 @@ class JSCodeGen()(using genCtx: Context) { val methodName = encodeJSNativeMemberSym(sym) val jsNativeLoadSpec = computeJSNativeLoadSpecOfValDef(sym) js.JSNativeMemberDef(flags, methodName, jsNativeLoadSpec) - } - private def genMethod(dd: DefDef): Option[js.MethodDef] = { + private def genMethod(dd: DefDef): Option[js.MethodDef] = withScopedVars( localNames := new LocalNameGenerator - ) { + ): genMethodWithCurrentLocalNameScope(dd) - } - } /** Gen JS code for a method definition in a class or in an impl class. * On the JS side, method names are mangled to encode the full signature @@ -1428,7 +1308,7 @@ class JSCodeGen()(using genCtx: Context) { * * Other (normal) methods are emitted with `genMethodBody()`. 
*/ - private def genMethodWithCurrentLocalNameScope(dd: DefDef): Option[js.MethodDef] = { + private def genMethodWithCurrentLocalNameScope(dd: DefDef): Option[js.MethodDef] = implicit val pos = dd.span val sym = dd.symbol val vparamss = dd.termParamss @@ -1452,16 +1332,16 @@ class JSCodeGen()(using genCtx: Context) { * do not need to worry about non-constructor members of native JS types, * since for those, the entire member list is ignored in `genJSClassData`. */ - def isIgnorableDefaultParam: Boolean = { - sym.name.is(DefaultGetterName) && sym.owner.is(ModuleClass) && { + def isIgnorableDefaultParam: Boolean = + sym.name.is(DefaultGetterName) && sym.owner.is(ModuleClass) `&&`: val info = new DefaultParamInfo(sym) - if (info.isForConstructor) { + if (info.isForConstructor) /* This is a default accessor for a constructor parameter. Check * whether the attached constructor is a native JS constructor, * which is the case iff the linked class is a native JS type. */ info.constructorOwner.hasAnnotation(jsdefn.JSNativeAnnot) - } else { + else /* #4553 We need to ignore default accessors for JS native defs. * However, because Scala.js <= 1.7.0 actually emitted code calling * those accessors, we must keep default accessors that would @@ -1469,19 +1349,14 @@ class JSCodeGen()(using genCtx: Context) { * that are `= js.native`. 
*/ !sym.owner.isJSType && - info.attachedMethod.hasAnnotation(jsdefn.JSNativeAnnot) && { - dd.rhs match { + info.attachedMethod.hasAnnotation(jsdefn.JSNativeAnnot) `&&`: + dd.rhs match case MaybeAsInstanceOf(Apply(fun, _)) => fun.symbol == jsdefn.JSPackage_native case _ => false - } - } - } - } - } - withPerMethodBodyState(sym) { + withPerMethodBodyState(sym): assert(vparamss.isEmpty || vparamss.tail.isEmpty, "Malformed parameter list: " + vparamss) val params = if (vparamss.isEmpty) Nil else vparamss.head.map(_.symbol) @@ -1491,19 +1366,19 @@ class JSCodeGen()(using genCtx: Context) { def jsParams = params.map(genParamDef(_)) - if (primitives.isPrimitive(sym)) { + if (primitives.isPrimitive(sym)) None - } else if (sym.is(Deferred) && currentClassSym.isNonNativeJSClass) { + else if (sym.is(Deferred) && currentClassSym.isNonNativeJSClass) // scala-js/#4409: Do not emit abstract methods in non-native JS classes None - } else if (sym.is(Deferred)) { + else if (sym.is(Deferred)) Some(js.MethodDef(js.MemberFlags.empty, methodName, originalName, jsParams, toIRType(patchedResultType(sym)), None)( OptimizerHints.empty, None)) - } else if (isIgnorableDefaultParam) { + else if (isIgnorableDefaultParam) // #11592 None - } else if (sym.is(Bridge) && sym.name.is(DefaultGetterName) && currentClassSym.isNonNativeJSClass) { + else if (sym.is(Bridge) && sym.name.is(DefaultGetterName) && currentClassSym.isNonNativeJSClass) /* #12572 Bridges for default accessors in non-native JS classes must not be emitted, * because they call another default accessor, making their entire body an * that cannot be eliminated. @@ -1511,54 +1386,45 @@ class JSCodeGen()(using genCtx: Context) { * JSExportsGen.defaultGetterDenot(). 
*/ None - } else /*if (sym.isClassConstructor && isHijackedBoxedClass(sym.owner)) { + else /*if (sym.isClassConstructor && isHijackedBoxedClass(sym.owner)) { None - } else*/ { + } else*/ /*def isTraitImplForwarder = dd.rhs match { case app: Apply => foreignIsImplClass(app.symbol.owner) case _ => false }*/ - val shouldMarkInline = { + val shouldMarkInline = sym.hasAnnotation(jsdefn.InlineAnnot) || sym.isAnonymousFunction - } - val shouldMarkNoinline = { + val shouldMarkNoinline = sym.hasAnnotation(jsdefn.NoinlineAnnot) /*&& !isTraitImplForwarder*/ - } - val optimizerHints = { + val optimizerHints = OptimizerHints.empty .withInline(shouldMarkInline) .withNoinline(shouldMarkNoinline) - } - val methodDef = { - if (sym.isClassConstructor) { + val methodDef = + if (sym.isClassConstructor) val namespace = js.MemberNamespace.Constructor js.MethodDef(js.MemberFlags.empty.withNamespace(namespace), methodName, originalName, jsParams, jstpe.NoType, Some(genStat(rhs)))( optimizerHints, None) - } else { - val namespace = if (isMethodStaticInIR(sym)) { + else + val namespace = if (isMethodStaticInIR(sym)) if (sym.isPrivate) js.MemberNamespace.PrivateStatic else js.MemberNamespace.PublicStatic - } else { + else if (sym.isPrivate) js.MemberNamespace.Private else js.MemberNamespace.Public - } val resultIRType = toIRType(patchedResultType(sym)) genMethodDef(namespace, methodName, originalName, params, resultIRType, rhs, optimizerHints) - } - } Some(methodDef) - } - } - } /** Generates the MethodDef of a (non-constructor) method * @@ -1572,25 +1438,24 @@ class JSCodeGen()(using genCtx: Context) { */ private def genMethodDef(namespace: js.MemberNamespace, methodName: js.MethodIdent, originalName: OriginalName, paramsSyms: List[Symbol], resultIRType: jstpe.Type, - tree: Tree, optimizerHints: OptimizerHints): js.MethodDef = { + tree: Tree, optimizerHints: OptimizerHints): js.MethodDef = implicit val pos = tree.span val jsParams = paramsSyms.map(genParamDef(_)) - def genBody() = 
localNames.makeLabeledIfRequiresEnclosingReturn(resultIRType) { + def genBody() = localNames.makeLabeledIfRequiresEnclosingReturn(resultIRType): if (resultIRType == jstpe.NoType) genStat(tree) else genExpr(tree) - } - if (namespace.isStatic || !currentClassSym.isNonNativeJSClass) { + if (namespace.isStatic || !currentClassSym.isNonNativeJSClass) val flags = js.MemberFlags.empty.withNamespace(namespace) js.MethodDef(flags, methodName, originalName, jsParams, resultIRType, Some(genBody()))( optimizerHints, None) - } else { + else val thisLocalIdent = freshLocalIdent("this") withScopedVars( thisLocalVarIdent := Some(thisLocalIdent) - ) { + ): val staticNamespace = if (namespace.isPrivate) js.MemberNamespace.PrivateStatic else js.MemberNamespace.PublicStatic @@ -1602,9 +1467,6 @@ class JSCodeGen()(using genCtx: Context) { js.MethodDef(flags, methodName, originalName, thisParamDef :: jsParams, resultIRType, Some(genBody()))( optimizerHints, None) - } - } - } // ParamDefs --------------------------------------------------------------- @@ -1617,50 +1479,44 @@ class JSCodeGen()(using genCtx: Context) { private def genParamDef(sym: Symbol, pos: Position): js.ParamDef = genParamDef(sym, toIRType(sym.info), pos) - private def genParamDef(sym: Symbol, ptpe: jstpe.Type, pos: Position): js.ParamDef = { + private def genParamDef(sym: Symbol, ptpe: jstpe.Type, pos: Position): js.ParamDef = js.ParamDef(encodeLocalSym(sym)(implicitly, pos, implicitly), originalNameOfLocal(sym), ptpe, mutable = false)(pos) - } // Generate statements and expressions ------------------------------------- /** Gen JS code for a tree in statement position (in the IR). 
*/ - private def genStat(tree: Tree): js.Tree = { + private def genStat(tree: Tree): js.Tree = exprToStat(genStatOrExpr(tree, isStat = true)) - } /** Turn a JavaScript expression of type Unit into a statement */ - private def exprToStat(tree: js.Tree): js.Tree = { + private def exprToStat(tree: js.Tree): js.Tree = /* Any JavaScript expression is also a statement, but at least we get rid * of some pure expressions that come from our own codegen. */ implicit val pos = tree.pos - tree match { + tree match case js.Block(stats :+ expr) => js.Block(stats :+ exprToStat(expr)) case _:js.Literal | _:js.This | _:js.VarRef => js.Skip() case _ => tree - } - } /** Gen JS code for a tree in expression position (in the IR). */ - private def genExpr(tree: Tree): js.Tree = { + private def genExpr(tree: Tree): js.Tree = val result = genStatOrExpr(tree, isStat = false) assert(result.tpe != jstpe.NoType, s"genExpr($tree) returned a tree with type NoType at pos ${tree.span}") result - } - def genExpr(name: JSName)(implicit pos: SourcePosition): js.Tree = name match { + def genExpr(name: JSName)(implicit pos: SourcePosition): js.Tree = name match case JSName.Literal(name) => js.StringLiteral(name) case JSName.Computed(sym) => genComputedJSName(sym) - } - private def genComputedJSName(sym: Symbol)(implicit pos: SourcePosition): js.Tree = { + private def genComputedJSName(sym: Symbol)(implicit pos: SourcePosition): js.Tree = /* By construction (i.e. restriction in PrepJSInterop), we know that sym * must be a static method. 
* Therefore, at this point, we can invoke it by loading its owner and @@ -1669,23 +1525,21 @@ class JSCodeGen()(using genCtx: Context) { def moduleOrGlobalScope = genLoadModuleOrGlobalScope(sym.owner) def module = genLoadModule(sym.owner) - if (sym.owner.isJSType) { + if (sym.owner.isJSType) if (!sym.owner.isNonNativeJSClass || sym.isJSExposed) genApplyJSMethodGeneric(sym, moduleOrGlobalScope, args = Nil, isStat = false) else genApplyJSClassMethod(module, sym, arguments = Nil) - } else { + else genApplyMethod(module, sym, arguments = Nil) - } - } /** Gen JS code for a tree in expression position (in the IR) or the * global scope. */ - def genExprOrGlobalScope(tree: Tree): MaybeGlobalScope = { + def genExprOrGlobalScope(tree: Tree): MaybeGlobalScope = implicit def pos: SourcePosition = tree.sourcePos - tree match { + tree match case _: This => val sym = tree.symbol if (sym != currentClassSym.get && sym.is(Module)) @@ -1695,12 +1549,11 @@ class JSCodeGen()(using genCtx: Context) { case _:Ident | _:Select => val sym = tree.symbol - if (sym.is(Module)) { + if (sym.is(Module)) assert(!sym.is(PackageClass), "Cannot use package as value: " + tree) genLoadModuleOrGlobalScope(sym) - } else { + else MaybeGlobalScope.NotGlobalScope(genExpr(tree)) - } case Apply(fun, _) => if (fun.symbol == jsdefn.JSDynamic_global) @@ -1710,28 +1563,26 @@ class JSCodeGen()(using genCtx: Context) { case _ => MaybeGlobalScope.NotGlobalScope(genExpr(tree)) - } - } /** Gen JS code for a tree in statement or expression position (in the IR). * * This is the main transformation method. Each node of the Scala AST * is transformed into an equivalent portion of the JS AST. 
*/ - private def genStatOrExpr(tree: Tree, isStat: Boolean): js.Tree = { + private def genStatOrExpr(tree: Tree, isStat: Boolean): js.Tree = implicit val pos: SourcePosition = tree.sourcePos report.debuglog(" " + tree) report.debuglog("") - tree match { + tree match /** Local val or var declaration */ case tree @ ValDef(name, _, _) => val sym = tree.symbol val rhs = tree.rhs val rhsTree = genExpr(rhs) - rhsTree match { + rhsTree match case js.Transient(UndefinedParam) => /* This is an intermediate assignment for default params on a * js.Any. Add the symbol to the corresponding set to inform @@ -1742,7 +1593,6 @@ class JSCodeGen()(using genCtx: Context) { case _ => js.VarDef(encodeLocalSym(sym), originalNameOfLocal(sym), toIRType(sym.info), sym.is(Mutable), rhsTree) - } case If(cond, thenp, elsep) => val tpe = @@ -1795,40 +1645,38 @@ class JSCodeGen()(using genCtx: Context) { case Select(qualifier, _) => val sym = tree.symbol - if (sym.is(Module)) { + if (sym.is(Module)) assert(!sym.is(Package), "Cannot use package as value: " + tree) genLoadModule(sym) - } else if (sym.is(JavaStatic)) { + else if (sym.is(JavaStatic)) genLoadStaticField(sym) - } else if (sym.hasAnnotation(jsdefn.JSNativeAnnot)) { + else if (sym.hasAnnotation(jsdefn.JSNativeAnnot)) genJSNativeMemberSelect(tree) - } else { + else val (field, boxed) = genAssignableField(sym, qualifier) if (boxed) unbox(field, atPhase(elimErasedValueTypePhase)(sym.info)) else field - } case tree: Ident => desugarIdent(tree).fold[js.Tree] { val sym = tree.symbol assert(!sym.is(Package), "Cannot use package as value: " + tree) - if (sym.is(Module)) { + if (sym.is(Module)) genLoadModule(sym) - } else if (undefinedDefaultParams.contains(sym)) { + else if (undefinedDefaultParams.contains(sym)) /* This is a default parameter whose assignment was moved to * a local variable. Put an undefined param instead. 
*/ js.Transient(UndefinedParam) - } else { + else genVarRef(sym) - } } { select => genStatOrExpr(select, isStat) } case Literal(value) => import Constants._ - value.tag match { + value.tag match case UnitTag => js.Skip() case BooleanTag => @@ -1853,35 +1701,31 @@ class JSCodeGen()(using genCtx: Context) { js.Null() case ClazzTag => genClassConstant(value.typeValue) - } case Block(stats, expr) => // #15419 Collapse { ; BoxedUnit } to val genStatsAndExpr0 = stats.map(genStat(_)) :+ genStatOrExpr(expr, isStat) - val genStatsAndExpr = genStatsAndExpr0 match { + val genStatsAndExpr = genStatsAndExpr0 match case (undefParam @ js.Transient(UndefinedParam)) :: js.Undefined() :: Nil => undefParam :: Nil case _ => genStatsAndExpr0 - } js.Block(genStatsAndExpr) case Typed(expr, _) => - expr match { + expr match case _: Super => genThis() case _ => genExpr(expr) - } case Assign(lhs0, rhs) => val sym = lhs0.symbol if (sym.is(JavaStaticTerm) && sym.source != ctx.compilationUnit.source) throw new FatalError(s"Assignment to static member ${sym.fullName} not supported") def genRhs = genExpr(rhs) - val lhs = lhs0 match { + val lhs = lhs0 match case lhs: Ident => desugarIdent(lhs).getOrElse(lhs) case lhs => lhs - } - lhs match { + lhs match case lhs: Select => val qualifier = lhs.qualifier @@ -1894,7 +1738,7 @@ class JSCodeGen()(using genCtx: Context) { /*if (!sym.is(Mutable) && !ctorAssignment) throw new FatalError(s"Assigning to immutable field ${sym.fullName} at $pos")*/ - if (sym.hasAnnotation(jsdefn.JSNativeAnnot)) { + if (sym.hasAnnotation(jsdefn.JSNativeAnnot)) /* This is an assignment to a @js.native field. Since we reject * `@js.native var`s as compile errors, this can only happen in * the constructor of the enclosing object. @@ -1902,19 +1746,16 @@ class JSCodeGen()(using genCtx: Context) { * emitted at all. 
*/ js.Skip() - } else { + else val (field, boxed) = genAssignableField(sym, qualifier) - if (boxed) { + if (boxed) val genBoxedRhs = box(genRhs, atPhase(elimErasedValueTypePhase)(sym.info)) js.Assign(field, genBoxedRhs) - } else { + else js.Assign(field, genRhs) - } - } case _ => js.Assign(genVarRef(sym), genRhs) - } /** Array constructor */ case javaSeqLiteral: JavaSeqLiteral => @@ -1933,27 +1774,24 @@ class JSCodeGen()(using genCtx: Context) { case _ => throw new FatalError("Unexpected tree in genExpr: " + tree + "/" + tree.getClass + " at: " + (tree.span: Position)) - } - } // end of genStatOrExpr() + // end of genStatOrExpr() - private def qualifierOf(fun: Tree): Tree = fun match { + private def qualifierOf(fun: Tree): Tree = fun match case fun: Ident => - fun.tpe match { + fun.tpe match case TermRef(prefix: TermRef, _) => tpd.ref(prefix) case TermRef(prefix: ThisType, _) => tpd.This(prefix.cls) - } case Select(qualifier, _) => qualifier case TypeApply(fun, _) => qualifierOf(fun) - } /** Gen JS this of the current class. * Normally encoded straightforwardly as a JS this. * But must be replaced by the `thisLocalVarIdent` local variable if there * is one. */ - private def genThis()(implicit pos: Position): js.Tree = { + private def genThis()(implicit pos: Position): js.Tree = /*if (tryingToGenMethodAsJSFunction) { throw new CancelGenMethodAsJSFunction( "Trying to generate `this` inside the body") @@ -1964,7 +1802,6 @@ class JSCodeGen()(using genCtx: Context) { } { thisLocalIdent => js.VarRef(thisLocalIdent)(currentThisType) } - } /** Gen IR code for a `try..catch` or `try..finally` block. * @@ -1990,7 +1827,7 @@ class JSCodeGen()(using genCtx: Context) { * exceptions (which do not extend `Throwable`) as wrapped in a * `js.JavaScriptException`. 
*/ - private def genTry(tree: Try, isStat: Boolean): js.Tree = { + private def genTry(tree: Try, isStat: Boolean): js.Tree = implicit val pos: SourcePosition = tree.sourcePos val Try(block, catches, finalizer) = tree @@ -2004,36 +1841,32 @@ class JSCodeGen()(using genCtx: Context) { if (catches.isEmpty) blockAST else genTryCatch(blockAST, catches, resultType, isStat) - genStat(finalizer) match { + genStat(finalizer) match case js.Skip() => handled case ast => js.TryFinally(handled, ast) - } - } private def genTryCatch(body: js.Tree, catches: List[CaseDef], resultType: jstpe.Type, - isStat: Boolean)(implicit pos: SourcePosition): js.Tree = { + isStat: Boolean)(implicit pos: SourcePosition): js.Tree = val exceptIdent = freshLocalIdent("e") val origExceptVar = js.VarRef(exceptIdent)(jstpe.AnyType) val mightCatchJavaScriptException = catches.exists { caseDef => - caseDef.pat match { + caseDef.pat match case Typed(Ident(nme.WILDCARD), tpt) => isMaybeJavaScriptException(tpt.tpe) case Ident(nme.WILDCARD) => true case pat @ Bind(_, _) => isMaybeJavaScriptException(pat.symbol.info) - } } - val (exceptValDef, exceptVar) = if (mightCatchJavaScriptException) { + val (exceptValDef, exceptVar) = if (mightCatchJavaScriptException) val valDef = js.VarDef(freshLocalIdent("e"), NoOriginalName, encodeClassType(defn.ThrowableClass), mutable = false, js.WrapAsThrowable(origExceptVar)) (valDef, valDef.ref) - } else { + else (js.Skip(), origExceptVar) - } val elseHandler: js.Tree = js.Throw(origExceptVar) @@ -2066,17 +1899,15 @@ class JSCodeGen()(using genCtx: Context) { }) // Generate the test - if (tpe =:= defn.ThrowableType) { + if (tpe =:= defn.ThrowableType) bodyWithBoundVar - } else { + else val cond = genIsInstanceOf(exceptVar, tpe) js.If(cond, bodyWithBoundVar, elsep)(resultType) - } } js.TryCatch(body, exceptIdent, NoOriginalName, js.Block(exceptValDef, handler))(resultType) - } /** Gen JS code for an Apply node (method call) * @@ -2084,7 +1915,7 @@ class JSCodeGen()(using 
genCtx: Context) { * calls, super calls, constructor calls, isInstanceOf/asInstanceOf, * primitives, JS calls, etc. They are further dispatched in here. */ - private def genApply(tree: Apply, isStat: Boolean): js.Tree = { + private def genApply(tree: Apply, isStat: Boolean): js.Tree = implicit val pos = tree.span val args = tree.args val sym = tree.fun.symbol @@ -2105,35 +1936,31 @@ class JSCodeGen()(using genCtx: Context) { * not exposed). We also need to handle non-constructor members of native * JS types. */ - def isJSDefaultParam: Boolean = { - sym.name.is(DefaultGetterName) && { + def isJSDefaultParam: Boolean = + sym.name.is(DefaultGetterName) `&&`: val info = new DefaultParamInfo(sym) - if (info.isForConstructor) { + if (info.isForConstructor) /* This is a default accessor for a constructor parameter. Check * whether the attached constructor is a JS constructor, which is * the case iff the linked class is a JS type. */ info.constructorOwner.isJSType - } else { - if (sym.owner.isJSType) { + else + if (sym.owner.isJSType) /* The default accessor is in a JS type. It is a JS default * param iff the enclosing class is native or the attached method * is exposed. */ !sym.owner.isNonNativeJSClass || info.attachedMethod.isJSExposed - } else { + else /* The default accessor is in a Scala type. It is a JS default * param iff the attached method is a native JS def. This can * only happen if the owner is a module class, which we test * first as a fast way out. 
*/ sym.owner.is(ModuleClass) && info.attachedMethod.hasAnnotation(jsdefn.JSNativeAnnot) - } - } - } - } - tree.fun match { + tree.fun match case _ if isJSDefaultParam => js.Transient(UndefinedParam) @@ -2144,21 +1971,18 @@ class JSCodeGen()(using genCtx: Context) { genApplyNew(tree) case _ => - if (primitives.isPrimitive(tree)) { + if (primitives.isPrimitive(tree)) genPrimitiveOp(tree, isStat) - } else if (Erasure.Boxing.isBox(sym)) { + else if (Erasure.Boxing.isBox(sym)) // Box a primitive value (cannot be Unit) val arg = args.head makePrimitiveBox(genExpr(arg), arg.tpe) - } else if (Erasure.Boxing.isUnbox(sym)) { + else if (Erasure.Boxing.isUnbox(sym)) // Unbox a primitive value (cannot be Unit) val arg = args.head makePrimitiveUnbox(genExpr(arg), tree.tpe) - } else { + else genNormalApply(tree, isStat) - } - } - } /** Gen JS code for a super call, of the form Class.super[mix].fun(args). * @@ -2170,17 +1994,17 @@ class JSCodeGen()(using genCtx: Context) { * two classes above the current one is invalid in Scala, the `mix` item is * irrelevant. */ - private def genSuperCall(tree: Apply, isStat: Boolean): js.Tree = { + private def genSuperCall(tree: Apply, isStat: Boolean): js.Tree = implicit val pos = tree.span val Apply(fun @ Select(sup @ Super(qual, _), _), args) = tree: @unchecked val sym = fun.symbol - if (sym == defn.Any_getClass) { + if (sym == defn.Any_getClass) // The only primitive that is also callable as super call js.GetClass(genThis()) - } else if (currentClassSym.isNonNativeJSClass) { + else if (currentClassSym.isNonNativeJSClass) genJSSuperCall(tree, isStat) - } else { + else /* #3013 `qual` can be `this.$outer()` in some cases since Scala 2.12, * so we call `genExpr(qual)`, not just `genThis()`. */ @@ -2189,17 +2013,14 @@ class JSCodeGen()(using genCtx: Context) { // Initialize the module instance just after the super constructor call. 
if (isStaticModule(currentClassSym) && !isModuleInitialized.get.value && - currentMethodSym.get.isClassConstructor) { + currentMethodSym.get.isClassConstructor) isModuleInitialized.get.value = true val className = encodeClassName(currentClassSym) val thisType = jstpe.ClassType(className) val initModule = js.StoreModule(className, js.This()(thisType)) js.Block(superCall, initModule) - } else { + else superCall - } - } - } /** Gen JS code for a constructor call (new). * Further refined into: @@ -2210,7 +2031,7 @@ class JSCodeGen()(using genCtx: Context) { * * new Array * * regular new */ - private def genApplyNew(tree: Apply): js.Tree = { + private def genApplyNew(tree: Apply): js.Tree = implicit val pos: SourcePosition = tree.sourcePos val Apply(fun @ Select(New(tpt), nme.CONSTRUCTOR), args) = tree: @unchecked @@ -2222,30 +2043,27 @@ class JSCodeGen()(using genCtx: Context) { val clsSym = tpe.typeSymbol - if (isHijackedClass(clsSym)) { + if (isHijackedClass(clsSym)) genNewHijackedClass(clsSym, ctor, args.map(genExpr)) - } else /*if (translatedAnonFunctions contains tpe.typeSymbol) { + else /*if (translatedAnonFunctions contains tpe.typeSymbol) { val functionMaker = translatedAnonFunctions(tpe.typeSymbol) functionMaker(args map genExpr) - } else*/ if (clsSym.isJSType) { + } else*/ if (clsSym.isJSType) genNewJSClass(tree) - } else { - toTypeRef(tpe) match { + else + toTypeRef(tpe) match case jstpe.ClassRef(className) => js.New(className, encodeMethodSym(ctor), genActualArgs(ctor, args)) case other => throw new FatalError(s"Non ClassRef cannot be instantiated: $other") - } - } - } /** Gen JS code for a call to a constructor of a hijacked class. * Reroute them to the `new` method with the same signature in the * companion object. 
*/ private def genNewHijackedClass(clazz: Symbol, ctor: Symbol, - args: List[js.Tree])(implicit pos: SourcePosition): js.Tree = { + args: List[js.Tree])(implicit pos: SourcePosition): js.Tree = val className = encodeClassName(clazz) val initName = encodeMethodSym(ctor).name @@ -2255,10 +2073,9 @@ class JSCodeGen()(using genCtx: Context) { js.ApplyStatic(js.ApplyFlags.empty, className, newMethodIdent, args)( jstpe.ClassType(className)) - } /** Gen JS code for a new of a JS class (subclass of `js.Any`). */ - private def genNewJSClass(tree: Apply): js.Tree = { + private def genNewJSClass(tree: Apply): js.Tree = acquireContextualJSClassValue { jsClassValue => implicit val pos: Position = tree.span @@ -2292,7 +2109,6 @@ class JSCodeGen()(using genCtx: Context) { js.JSNew(jsClassVal, genArgs) } } - } /** Generate an instance of an anonymous (non-lambda) JS class inline * @@ -2302,19 +2118,17 @@ class JSCodeGen()(using genCtx: Context) { * @param pos Position of the original New tree */ private def genNewAnonJSClass(sym: Symbol, jsSuperClassValue: js.Tree, args: List[js.Tree])( - implicit pos: Position): js.Tree = { + implicit pos: Position): js.Tree = assert(sym.isAnonymousClass, s"Generating AnonJSClassNew of non anonymous JS class ${sym.fullName}") // Find the TypeDef for this anonymous class and generate it val typeDef = consumeLazilyGeneratedAnonClass(sym) - val originalClassDef = resetAllScopedVars { + val originalClassDef = resetAllScopedVars: withScopedVars( currentClassSym := sym - ) { + ): genNonNativeJSClass(typeDef) - } - } // Partition class members. 
val privateFieldDefs = mutable.ListBuffer.empty[js.FieldDef] @@ -2322,7 +2136,7 @@ class JSCodeGen()(using genCtx: Context) { val instanceMembers = mutable.ListBuffer.empty[js.MemberDef] var constructor: Option[js.JSConstructorDef] = None - originalClassDef.memberDefs.foreach { + originalClassDef.memberDefs.foreach: case fdef: js.FieldDef => privateFieldDefs += fdef @@ -2347,13 +2161,12 @@ class JSCodeGen()(using genCtx: Context) { case nativeMemberDef: js.JSNativeMemberDef => throw new FatalError("illegal native JS member in JS class at " + nativeMemberDef.pos) - } assert(originalClassDef.topLevelExportDefs.isEmpty, "Found top-level exports in anonymous JS class at " + pos) // Make new class def with static members - val newClassDef = { + val newClassDef = implicit val pos = originalClassDef.pos val parent = js.ClassIdent(jsNames.ObjectClass) js.ClassDef(originalClassDef.name, originalClassDef.originalName, @@ -2361,18 +2174,15 @@ class JSCodeGen()(using genCtx: Context) { jsSuperClass = None, jsNativeLoadSpec = None, classDefMembers.toList, Nil)( originalClassDef.optimizerHints) - } generatedClasses += newClassDef // Construct inline class definition - val jsClassCaptures = originalClassDef.jsClassCaptures.getOrElse { + val jsClassCaptures = originalClassDef.jsClassCaptures.getOrElse: throw new AssertionError(s"no class captures for anonymous JS class at $pos") - } - val js.JSConstructorDef(_, ctorParams, ctorRestParam, ctorBody) = constructor.getOrElse { + val js.JSConstructorDef(_, ctorParams, ctorRestParam, ctorBody) = constructor.getOrElse: throw new AssertionError("No ctor found") - } assert(ctorParams.isEmpty && ctorRestParam.isEmpty, s"non-empty constructor params for anonymous JS class at $pos") @@ -2394,7 +2204,7 @@ class JSCodeGen()(using genCtx: Context) { def memberLambda(params: List[js.ParamDef], restParam: Option[js.ParamDef], body: js.Tree)(implicit pos: ir.Position): js.Closure = js.Closure(arrow = false, captureParams = Nil, params, restParam, 
body, captureValues = Nil) - val memberDefinitions0 = instanceMembers.toList.map { + val memberDefinitions0 = instanceMembers.toList.map: case fdef: js.FieldDef => throw new AssertionError("unexpected FieldDef") @@ -2432,11 +2242,10 @@ class JSCodeGen()(using genCtx: Context) { case nativeMemberDef: js.JSNativeMemberDef => throw new FatalError("illegal native JS member in JS class at " + nativeMemberDef.pos) - } - val memberDefinitions = if (privateFieldDefs.isEmpty) { + val memberDefinitions = if (privateFieldDefs.isEmpty) memberDefinitions0 - } else { + else /* Private fields, declared in FieldDefs, are stored in a separate * object, itself stored as a non-enumerable field of the `selfRef`. * The name of that field is retrieved at @@ -2451,13 +2260,12 @@ class JSCodeGen()(using genCtx: Context) { * - Get around the fact that abstract JS types cannot declare * FieldDefs (#3777). */ - val fieldsObjValue = { + val fieldsObjValue = js.JSObjectConstr(privateFieldDefs.toList.map { fdef => implicit val pos = fdef.pos js.StringLiteral(fdef.name.name.nameString) -> jstpe.zeroOf(fdef.ftpe) }) - } - val definePrivateFieldsObj = { + val definePrivateFieldsObj = /* Object.defineProperty(selfRef, privateFieldsSymbol, { * value: fieldsObjValue * }); @@ -2475,33 +2283,29 @@ class JSCodeGen()(using genCtx: Context) { )) ) ) - } definePrivateFieldsObj :: memberDefinitions0 - } // Transform the constructor body. 
- val inlinedCtorStats: List[js.Tree] = { + val inlinedCtorStats: List[js.Tree] = val beforeSuper = ctorBody.beforeSuper - val superCall = { + val superCall = implicit val pos = ctorBody.superCall.pos val js.JSSuperConstructorCall(args) = ctorBody.superCall - val newTree = { + val newTree = val ident = originalClassDef.superClass.getOrElse(throw new FatalError("No superclass")) if (args.isEmpty && ident.name == JSObjectClassName) js.JSObjectConstr(Nil) else js.JSNew(jsSuperClassRef, args) - } val selfVarDef = js.VarDef(selfName, thisOriginalName, jstpe.AnyType, mutable = false, newTree) selfVarDef :: memberDefinitions - } // After the super call, substitute `selfRef` for `This()` val afterSuper = new ir.Transformers.Transformer { - override def transform(tree: js.Tree, isStat: Boolean): js.Tree = tree match { + override def transform(tree: js.Tree, isStat: Boolean): js.Tree = tree match case js.This() => selfRef(tree.pos) @@ -2512,19 +2316,16 @@ class JSCodeGen()(using genCtx: Context) { case tree => super.transform(tree, isStat) - } }.transformStats(ctorBody.afterSuper) beforeSuper ::: superCall ::: afterSuper - } val closure = js.Closure(arrow = true, jsClassCaptures, Nil, None, js.Block(inlinedCtorStats, selfRef), jsSuperClassValue :: args) js.JSFunctionApply(closure, Nil) - } /** Gen JS code for a primitive method call. 
*/ - private def genPrimitiveOp(tree: Apply, isStat: Boolean): js.Tree = { + private def genPrimitiveOp(tree: Apply, isStat: Boolean): js.Tree = import dotty.tools.backend.ScalaPrimitivesOps._ implicit val pos = tree.span @@ -2552,19 +2353,16 @@ class JSCodeGen()(using genCtx: Context) { genJSPrimitive(tree, args, code, isStat) else throw new FatalError(s"Unknown primitive: ${tree.symbol.fullName} at: $pos") - } /** Gen JS code for a simple operation (arithmetic, logical, or comparison) */ - private def genSimpleOp(tree: Apply, args: List[Tree], code: Int): js.Tree = { - args match { + private def genSimpleOp(tree: Apply, args: List[Tree], code: Int): js.Tree = + args match case List(arg) => genSimpleUnaryOp(tree, arg, code) case List(lhs, rhs) => genSimpleBinaryOp(tree, lhs, rhs, code) case _ => throw new FatalError("Incorrect arity for primitive") - } - } /** Gen JS code for a simple unary operation. */ - private def genSimpleUnaryOp(tree: Apply, arg: Tree, code: Int): js.Tree = { + private def genSimpleUnaryOp(tree: Apply, arg: Tree, code: Int): js.Tree = import dotty.tools.backend.ScalaPrimitivesOps._ implicit val pos = tree.span @@ -2572,12 +2370,12 @@ class JSCodeGen()(using genCtx: Context) { val resultIRType = toIRType(tree.tpe) val genArg = adaptPrimitive(genExpr(arg), resultIRType) - (code: @switch) match { + (code: @switch) match case POS => genArg case NEG => - (resultIRType: @unchecked) match { + (resultIRType: @unchecked) match case jstpe.IntType => js.BinaryOp(js.BinaryOp.Int_-, js.IntLiteral(0), genArg) case jstpe.LongType => @@ -2586,26 +2384,22 @@ class JSCodeGen()(using genCtx: Context) { js.BinaryOp(js.BinaryOp.Float_*, js.FloatLiteral(-1.0f), genArg) case jstpe.DoubleType => js.BinaryOp(js.BinaryOp.Double_*, js.DoubleLiteral(-1.0), genArg) - } case NOT => - (resultIRType: @unchecked) match { + (resultIRType: @unchecked) match case jstpe.IntType => js.BinaryOp(js.BinaryOp.Int_^, js.IntLiteral(-1), genArg) case jstpe.LongType => 
js.BinaryOp(js.BinaryOp.Long_^, js.LongLiteral(-1), genArg) - } case ZNOT => js.UnaryOp(js.UnaryOp.Boolean_!, genArg) case _ => throw new FatalError("Unknown unary operation code: " + code) - } - } /** Gen JS code for a simple binary operation. */ - private def genSimpleBinaryOp(tree: Apply, lhs: Tree, rhs: Tree, code: Int): js.Tree = { + private def genSimpleBinaryOp(tree: Apply, lhs: Tree, rhs: Tree, code: Int): js.Tree = import dotty.tools.backend.ScalaPrimitivesOps._ implicit val pos: SourcePosition = tree.sourcePos @@ -2615,12 +2409,12 @@ class JSCodeGen()(using genCtx: Context) { val isShift = isShiftOp(code) - val opType = { - if (isShift) { + val opType = + if (isShift) if (lhsIRType == jstpe.LongType) jstpe.LongType else jstpe.IntType - } else { - (lhsIRType, rhsIRType) match { + else + (lhsIRType, rhsIRType) match case (jstpe.DoubleType, _) | (_, jstpe.DoubleType) => jstpe.DoubleType case (jstpe.FloatType, _) | (_, jstpe.FloatType) => jstpe.FloatType case (jstpe.LongType, _) | (_, jstpe.LongType) => jstpe.LongType @@ -2628,9 +2422,6 @@ class JSCodeGen()(using genCtx: Context) { case (_, jstpe.IntType | jstpe.ByteType | jstpe.ShortType | jstpe.CharType) => jstpe.IntType case (jstpe.BooleanType, _) | (_, jstpe.BooleanType) => jstpe.BooleanType case _ => jstpe.AnyType - } - } - } val lsrc = if (opType == jstpe.AnyType) genExpr(lhs) @@ -2639,18 +2430,18 @@ class JSCodeGen()(using genCtx: Context) { if (opType == jstpe.AnyType) genExpr(rhs) else adaptPrimitive(genExpr(rhs), if (isShift) jstpe.IntType else opType) - if (opType == jstpe.AnyType && isUniversalEqualityOp(code)) { + if (opType == jstpe.AnyType && isUniversalEqualityOp(code)) genUniversalEqualityOp(lhs.tpe, rhs.tpe, lsrc, rsrc, code) - } else if (code == ZOR) { + else if (code == ZOR) js.If(lsrc, js.BooleanLiteral(true), rsrc)(jstpe.BooleanType) - } else if (code == ZAND) { + else if (code == ZAND) js.If(lsrc, rsrc, js.BooleanLiteral(false))(jstpe.BooleanType) - } else { + else import js.BinaryOp._ 
- (opType: @unchecked) match { + (opType: @unchecked) match case jstpe.IntType => - val op = (code: @switch) match { + val op = (code: @switch) match case ADD => Int_+ case SUB => Int_- case MUL => Int_* @@ -2669,7 +2460,6 @@ class JSCodeGen()(using genCtx: Context) { case LE => Int_<= case GT => Int_> case GE => Int_>= - } js.BinaryOp(op, lsrc, rsrc) case jstpe.FloatType => @@ -2682,7 +2472,7 @@ class JSCodeGen()(using genCtx: Context) { def withDoubles(op: Int): js.Tree = js.BinaryOp(op, toDouble(lsrc), toDouble(rsrc)) - (code: @switch) match { + (code: @switch) match case ADD => withFloats(Float_+) case SUB => withFloats(Float_-) case MUL => withFloats(Float_*) @@ -2695,10 +2485,9 @@ class JSCodeGen()(using genCtx: Context) { case LE => withDoubles(Double_<=) case GT => withDoubles(Double_>) case GE => withDoubles(Double_>=) - } case jstpe.DoubleType => - val op = (code: @switch) match { + val op = (code: @switch) match case ADD => Double_+ case SUB => Double_- case MUL => Double_* @@ -2711,11 +2500,10 @@ class JSCodeGen()(using genCtx: Context) { case LE => Double_<= case GT => Double_> case GE => Double_>= - } js.BinaryOp(op, lsrc, rsrc) case jstpe.LongType => - val op = (code: @switch) match { + val op = (code: @switch) match case ADD => Long_+ case SUB => Long_- case MUL => Long_* @@ -2734,48 +2522,41 @@ class JSCodeGen()(using genCtx: Context) { case LE => Long_<= case GT => Long_> case GE => Long_>= - } js.BinaryOp(op, lsrc, rsrc) case jstpe.BooleanType => - val op = (code: @switch) match { + val op = (code: @switch) match case EQ => Boolean_== case NE => Boolean_!= case OR => Boolean_| case AND => Boolean_& case XOR => Boolean_!= - } js.BinaryOp(op, lsrc, rsrc) case jstpe.AnyType => - val op = code match { + val op = code match case ID => === case NI => !== - } js.BinaryOp(op, lsrc, rsrc) - } - } - } private def adaptPrimitive(value: js.Tree, to: jstpe.Type)( - implicit pos: Position): js.Tree = { + implicit pos: Position): js.Tree = 
genConversion(value.tpe, to, value) - } /* This method corresponds to the method of the same name in * BCodeBodyBuilder of the JVM back-end. It ends up calling the method * BCodeIdiomatic.emitT2T, whose logic we replicate here. */ private def genConversion(from: jstpe.Type, to: jstpe.Type, value: js.Tree)( - implicit pos: Position): js.Tree = { + implicit pos: Position): js.Tree = import js.UnaryOp._ - if (from == to || from == jstpe.NothingType) { + if (from == to || from == jstpe.NothingType) value - } else if (from == jstpe.BooleanType || to == jstpe.BooleanType) { + else if (from == jstpe.BooleanType || to == jstpe.BooleanType) throw new AssertionError(s"Invalid genConversion from $from to $to") - } else { - def intValue = (from: @unchecked) match { + else + def intValue = (from: @unchecked) match case jstpe.IntType => value case jstpe.CharType => js.UnaryOp(CharToInt, value) case jstpe.ByteType => js.UnaryOp(ByteToInt, value) @@ -2783,16 +2564,14 @@ class JSCodeGen()(using genCtx: Context) { case jstpe.LongType => js.UnaryOp(LongToInt, value) case jstpe.FloatType => js.UnaryOp(DoubleToInt, js.UnaryOp(FloatToDouble, value)) case jstpe.DoubleType => js.UnaryOp(DoubleToInt, value) - } - def doubleValue = from match { + def doubleValue = from match case jstpe.DoubleType => value case jstpe.FloatType => js.UnaryOp(FloatToDouble, value) case jstpe.LongType => js.UnaryOp(LongToDouble, value) case _ => js.UnaryOp(IntToDouble, intValue) - } - (to: @unchecked) match { + (to: @unchecked) match case jstpe.CharType => js.UnaryOp(IntToChar, intValue) case jstpe.ByteType => @@ -2802,12 +2581,11 @@ class JSCodeGen()(using genCtx: Context) { case jstpe.IntType => intValue case jstpe.LongType => - from match { + from match case jstpe.FloatType | jstpe.DoubleType => js.UnaryOp(DoubleToLong, doubleValue) case _ => js.UnaryOp(IntToLong, intValue) - } case jstpe.FloatType => if (from == jstpe.LongType) js.UnaryOp(js.UnaryOp.LongToFloat, value) @@ -2815,32 +2593,26 @@ class 
JSCodeGen()(using genCtx: Context) { js.UnaryOp(js.UnaryOp.DoubleToFloat, doubleValue) case jstpe.DoubleType => doubleValue - } - } - } /** Gen JS code for a universal equality test. */ private def genUniversalEqualityOp(ltpe: Type, rtpe: Type, lhs: js.Tree, rhs: js.Tree, code: Int)( - implicit pos: SourcePosition): js.Tree = { + implicit pos: SourcePosition): js.Tree = import dotty.tools.backend.ScalaPrimitivesOps._ - val bypassEqEq = { + val bypassEqEq = // Do not call equals if we have a literal null at either side. lhs.isInstanceOf[js.Null] || rhs.isInstanceOf[js.Null] - } - if (bypassEqEq) { + if (bypassEqEq) js.BinaryOp( if (code == EQ) js.BinaryOp.=== else js.BinaryOp.!==, lhs, rhs) - } else { + else val body = genEqEqPrimitive(ltpe, rtpe, lhs, rhs) if (code == EQ) body else js.UnaryOp(js.UnaryOp.Boolean_!, body) - } - } private lazy val externalEqualsNumNum: Symbol = defn.BoxesRunTimeModule.requiredMethod(nme.equalsNumNum) @@ -2853,7 +2625,7 @@ class JSCodeGen()(using genCtx: Context) { /** Gen JS code for a call to Any.== */ private def genEqEqPrimitive(ltpe: Type, rtpe: Type, lsrc: js.Tree, rsrc: js.Tree)( - implicit pos: SourcePosition): js.Tree = { + implicit pos: SourcePosition): js.Tree = report.debuglog(s"$ltpe == $rtpe") val lsym = ltpe.typeSymbol.asClass val rsym = rtpe.typeSymbol.asClass @@ -2870,32 +2642,28 @@ class JSCodeGen()(using genCtx: Context) { * java.lang.Double: their `equals` have different behavior around `NaN` * and `-0.0`, see Javadoc (scala-dev#329, scala-js#2799). 
*/ - val mustUseAnyComparator: Boolean = { - lsym.isJSType || rsym.isJSType || { + val mustUseAnyComparator: Boolean = + lsym.isJSType || rsym.isJSType `||`: val p = ctx.platform - p.isMaybeBoxed(lsym) && p.isMaybeBoxed(rsym) && { + p.isMaybeBoxed(lsym) && p.isMaybeBoxed(rsym) `&&`: val areSameFinals = lsym.is(Final) && rsym.is(Final) && (ltpe =:= rtpe) !areSameFinals || lsym == defn.BoxedFloatClass || lsym == defn.BoxedDoubleClass - } - } - } - if (mustUseAnyComparator) { - val equalsMethod: Symbol = { + if (mustUseAnyComparator) + val equalsMethod: Symbol = val ptfm = ctx.platform - if (lsym.derivesFrom(defn.BoxedNumberClass)) { + if (lsym.derivesFrom(defn.BoxedNumberClass)) if (rsym.derivesFrom(defn.BoxedNumberClass)) externalEqualsNumNum else if (rsym.derivesFrom(defn.BoxedCharClass)) externalEqualsNumObject // will be externalEqualsNumChar in 2.12, SI-9030 else externalEqualsNumObject - } else externalEquals - } + else externalEquals genApplyStatic(equalsMethod, List(lsrc, rsrc)) - } else { + else // if (lsrc eq null) rsrc eq null else lsrc.equals(rsrc) - if (lsym == defn.StringClass) { + if (lsym == defn.StringClass) // String.equals(that) === (this eq that) js.BinaryOp(js.BinaryOp.===, lsrc, rsrc) - } else { + else /* This requires to evaluate both operands in local values first. * The optimizer will eliminate them if possible. */ @@ -2908,29 +2676,24 @@ class JSCodeGen()(using genCtx: Context) { js.BinaryOp(js.BinaryOp.===, rtemp.ref, js.Null()), genApplyMethod(ltemp.ref, defn.Any_equals, List(rtemp.ref)))( jstpe.BooleanType)) - } - } - } /** Gen JS code for string concatenation. 
*/ private def genStringConcat(tree: Apply, receiver: Tree, - args: List[Tree]): js.Tree = { + args: List[Tree]): js.Tree = implicit val pos = tree.span js.BinaryOp(js.BinaryOp.String_+, genExpr(receiver), genExpr(args.head)) - } /** Gen JS code for a call to Any.## */ - private def genScalaHash(tree: Apply, receiver: Tree): js.Tree = { + private def genScalaHash(tree: Apply, receiver: Tree): js.Tree = implicit val pos: SourcePosition = tree.sourcePos genModuleApplyMethod(defn.ScalaRuntimeModule.requiredMethod(nme.hash_), List(genExpr(receiver))) - } /** Gen JS code for an array operation (get, set or length) */ - private def genArrayOp(tree: Tree, code: Int): js.Tree = { + private def genArrayOp(tree: Tree, code: Int): js.Tree = import dotty.tools.backend.ScalaPrimitivesOps._ implicit val pos = tree.span @@ -2941,36 +2704,33 @@ class JSCodeGen()(using genCtx: Context) { val genArray = genExpr(arrayObj) val genArgs = args.map(genExpr) - def elementType: Type = arrayObj.tpe.widenDealias match { + def elementType: Type = arrayObj.tpe.widenDealias match case defn.ArrayOf(el) => el case JavaArrayType(el) => el case tpe => val msg = em"expected Array $tpe" report.error(msg) ErrorType(msg) - } def genSelect(): js.AssignLhs = js.ArraySelect(genArray, genArgs(0))(toIRType(elementType)) - if (isArrayGet(code)) { + if (isArrayGet(code)) // get an item of the array assert(args.length == 1, s"Array get requires 1 argument, found ${args.length} in $tree") genSelect() - } else if (isArraySet(code)) { + else if (isArraySet(code)) // set an item of the array assert(args.length == 2, s"Array set requires 2 arguments, found ${args.length} in $tree") js.Assign(genSelect(), genArgs(1)) - } else { + else // length of the array js.ArrayLength(genArray) - } - } /** Gen JS code for a call to AnyRef.synchronized */ - private def genSynchronized(tree: Apply, isStat: Boolean): js.Tree = { + private def genSynchronized(tree: Apply, isStat: Boolean): js.Tree = /* JavaScript is single-threaded, 
so we can drop the * synchronization altogether. */ @@ -2980,7 +2740,7 @@ class JSCodeGen()(using genCtx: Context) { val genReceiver = genExpr(receiver) val genArg = genStatOrExpr(arg, isStat) - genReceiver match { + genReceiver match case js.This() => // common case for which there is no side-effect nor NPE genArg @@ -2991,31 +2751,26 @@ class JSCodeGen()(using genCtx: Context) { js.Throw(js.New(NullPointerExceptionClass, js.MethodIdent(jsNames.NoArgConstructorName), Nil)), js.Skip())(jstpe.NoType), genArg) - } - } /** Gen JS code for a coercion */ - private def genCoercion(tree: Apply, receiver: Tree, code: Int): js.Tree = { + private def genCoercion(tree: Apply, receiver: Tree, code: Int): js.Tree = implicit val pos = tree.span val source = genExpr(receiver) val resultType = toIRType(tree.tpe) adaptPrimitive(source, resultType) - } /** Gen a call to the special `throw` method. */ - private def genThrow(tree: Apply, args: List[Tree]): js.Tree = { + private def genThrow(tree: Apply, args: List[Tree]): js.Tree = implicit val pos: SourcePosition = tree.sourcePos val exception = args.head val genException = genExpr(exception) - genException match { + genException match case js.New(cls, _, _) if cls != JavaScriptExceptionClassName => // Common case where ex is neither null nor a js.JavaScriptException js.Throw(genException) case _ => js.Throw(js.UnwrapFromThrowable(genException)) - } - } /** Gen a "normal" apply (to a true method). * @@ -3026,35 +2781,31 @@ class JSCodeGen()(using genCtx: Context) { * * Calls to methods in impl classes of Scala2 traits. 
* * Regular method call */ - private def genNormalApply(tree: Apply, isStat: Boolean): js.Tree = { + private def genNormalApply(tree: Apply, isStat: Boolean): js.Tree = implicit val pos = tree.span - val fun = tree.fun match { + val fun = tree.fun match case fun: Ident => desugarIdent(fun).get case fun: Select => fun - } val receiver = fun.qualifier val args = tree.args val sym = fun.symbol - def isStringMethodFromObject: Boolean = sym.name match { + def isStringMethodFromObject: Boolean = sym.name match case nme.toString_ | nme.equals_ | nme.hashCode_ => true case _ => false - } - if (isMethodStaticInIR(sym)) { + if (isMethodStaticInIR(sym)) genApplyStatic(sym, genActualArgs(sym, args)) - } else if (sym.owner.isJSType) { + else if (sym.owner.isJSType) if (!sym.owner.isNonNativeJSClass || sym.isJSExposed) genApplyJSMethodGeneric(sym, genExprOrGlobalScope(receiver), genActualJSArgs(sym, args), isStat)(tree.sourcePos) else genApplyJSClassMethod(genExpr(receiver), sym, genActualArgs(sym, args)) - } else if (sym.hasAnnotation(jsdefn.JSNativeAnnot)) { + else if (sym.hasAnnotation(jsdefn.JSNativeAnnot)) genJSNativeMemberCall(tree) - } else { + else genApplyMethodMaybeStatically(genExpr(receiver), sym, genActualArgs(sym, args)) - } - } /** Gen JS code for a call to a JS method (of a subclass of `js.Any`). 
* @@ -3070,30 +2821,27 @@ class JSCodeGen()(using genCtx: Context) { private def genApplyJSMethodGeneric(sym: Symbol, receiver: MaybeGlobalScope, args: List[js.TreeOrJSSpread], isStat: Boolean, jsSuperClassValue: Option[js.Tree] = None)( - implicit pos: SourcePosition): js.Tree = { + implicit pos: SourcePosition): js.Tree = - def argsNoSpread: List[js.Tree] = { + def argsNoSpread: List[js.Tree] = assert(!args.exists(_.isInstanceOf[js.JSSpread]), s"Unexpected spread at $pos") args.asInstanceOf[List[js.Tree]] - } val argc = args.size // meaningful only for methods that don't have varargs - def requireNotSuper(): Unit = { + def requireNotSuper(): Unit = if (jsSuperClassValue.isDefined) report.error("Illegal super call in Scala.js-defined JS class", pos) - } def requireNotSpread(arg: js.TreeOrJSSpread): js.Tree = arg.asInstanceOf[js.Tree] - def genSuperReference(propName: js.Tree): js.AssignLhs = { + def genSuperReference(propName: js.Tree): js.AssignLhs = jsSuperClassValue.fold[js.AssignLhs] { genJSSelectOrGlobalRef(receiver, propName) } { superClassValue => js.JSSuperSelect(superClassValue, ruleOutGlobalScope(receiver), propName) } - } def genSelectGet(propName: js.Tree): js.Tree = genSuperReference(propName) @@ -3101,15 +2849,14 @@ class JSCodeGen()(using genCtx: Context) { def genSelectSet(propName: js.Tree, value: js.Tree): js.Tree = js.Assign(genSuperReference(propName), value) - def genCall(methodName: js.Tree, args: List[js.TreeOrJSSpread]): js.Tree = { + def genCall(methodName: js.Tree, args: List[js.TreeOrJSSpread]): js.Tree = jsSuperClassValue.fold[js.Tree] { genJSMethodApplyOrGlobalRefApply(receiver, methodName, args) } { superClassValue => js.JSSuperMethodCall(superClassValue, ruleOutGlobalScope(receiver), methodName, args) } - } - val boxedResult = sym.jsCallingConvention match { + val boxedResult = sym.jsCallingConvention match case JSCallingConvention.UnaryOp(code) => requireNotSuper() assert(argc == 0, s"bad argument count ($argc) for unary op at 
$pos") @@ -3128,24 +2875,22 @@ class JSCodeGen()(using genCtx: Context) { js.JSFunctionApply(ruleOutGlobalScope(receiver), args) case JSCallingConvention.Property(jsName) => - argsNoSpread match { + argsNoSpread match case Nil => genSelectGet(genExpr(jsName)) case value :: Nil => genSelectSet(genExpr(jsName), value) case _ => throw new AssertionError(s"property methods should have 0 or 1 non-varargs arguments at $pos") - } case JSCallingConvention.BracketAccess => - argsNoSpread match { + argsNoSpread match case keyArg :: Nil => genSelectGet(keyArg) case keyArg :: valueArg :: Nil => genSelectSet(keyArg, valueArg) case _ => throw new AssertionError(s"@JSBracketAccess methods should have 1 or 2 non-varargs arguments at $pos") - } case JSCallingConvention.BracketCall => val (methodName, actualArgs) = extractFirstArg(args) @@ -3153,15 +2898,13 @@ class JSCodeGen()(using genCtx: Context) { case JSCallingConvention.Method(jsName) => genCall(genExpr(jsName), args) - } - if (isStat) { + if (isStat) boxedResult - } else { - val tpe = atPhase(elimErasedValueTypePhase) { + else + val tpe = atPhase(elimErasedValueTypePhase): sym.info.finalResultType - } - if (tpe.isRef(defn.BoxedUnitClass) && sym.isGetter) { + if (tpe.isRef(defn.BoxedUnitClass) && sym.isGetter) /* Work around to reclaim Scala 2 erasure behavior, assumed by the test * NonNativeJSTypeTest.defaultValuesForFields. * Scala 2 erases getters of `Unit`-typed fields as returning `Unit` @@ -3175,18 +2918,15 @@ class JSCodeGen()(using genCtx: Context) { * not to assume such a strict interpretation of erasure. */ js.Block(boxedResult, js.Undefined()) - } else { + else unbox(boxedResult, tpe) - } - } - } /** Extract the first argument in a list of actual arguments. * * This is nothing else than decomposing into head and tail, except that * we assert that the first element is not a JSSpread. 
*/ - private def extractFirstArg(args: List[js.TreeOrJSSpread]): (js.Tree, List[js.TreeOrJSSpread]) = { + private def extractFirstArg(args: List[js.TreeOrJSSpread]): (js.Tree, List[js.TreeOrJSSpread]) = assert(args.nonEmpty, "Trying to extract the first argument of an empty argument list") val firstArg = args.head @@ -3194,7 +2934,6 @@ class JSCodeGen()(using genCtx: Context) { "Trying to extract the first argument of an argument list starting " + "with a Spread argument: " + firstArg) (firstArg.asInstanceOf[js.Tree], args.tail) - } /** Gen JS code for a call to a native JS def or val. */ private def genJSNativeMemberSelect(tree: Tree): js.Tree = @@ -3205,7 +2944,7 @@ class JSCodeGen()(using genCtx: Context) { genJSNativeMemberSelectOrCall(tree, tree.args) /** Gen JS code for a call to a native JS def or val. */ - private def genJSNativeMemberSelectOrCall(tree: Tree, args: List[Tree]): js.Tree = { + private def genJSNativeMemberSelectOrCall(tree: Tree, args: List[Tree]): js.Tree = val sym = tree.symbol implicit val pos = tree.span @@ -3220,9 +2959,8 @@ class JSCodeGen()(using genCtx: Context) { unbox(boxedResult, atPhase(elimErasedValueTypePhase) { sym.info.resultType }) - } - private def genJSSuperCall(tree: Apply, isStat: Boolean): js.Tree = { + private def genJSSuperCall(tree: Apply, isStat: Boolean): js.Tree = acquireContextualJSClassValue { explicitJSSuperClassValue => implicit val pos = tree.span val Apply(fun @ Select(sup @ Super(qual, _), _), args) = tree: @unchecked @@ -3232,26 +2970,23 @@ class JSCodeGen()(using genCtx: Context) { def genScalaArgs = genActualArgs(sym, args) def genJSArgs = genActualJSArgs(sym, args) - if (sym.owner == defn.ObjectClass) { + if (sym.owner == defn.ObjectClass) // Normal call anyway assert(!sym.isClassConstructor, s"Trying to call the super constructor of Object in a non-native JS class at $pos") genApplyMethod(genReceiver, sym, genScalaArgs) - } else if (sym.isClassConstructor) { + else if (sym.isClassConstructor) throw new 
AssertionError( s"calling a JS super constructor should have happened in genPrimaryJSClassCtor at $pos") - } else if (sym.owner.isNonNativeJSClass && !sym.isJSExposed) { + else if (sym.owner.isNonNativeJSClass && !sym.isJSExposed) // Reroute to the static method genApplyJSClassMethod(genReceiver, sym, genScalaArgs) - } else { - val jsSuperClassValue = explicitJSSuperClassValue.orElse { + else + val jsSuperClassValue = explicitJSSuperClassValue.orElse: Some(genLoadJSConstructor(currentClassSym.get.asClass.superClass)) - } genApplyJSMethodGeneric(sym, MaybeGlobalScope.NotGlobalScope(genReceiver), genJSArgs, isStat, jsSuperClassValue)(tree.sourcePos) - } } - } /** Gen JS code for a call to a polymorphic method. * @@ -3261,7 +2996,7 @@ class JSCodeGen()(using genCtx: Context) { * (Well, in fact `DottyRunTime.newRefArray` too, but it is handled as a * primitive instead.) */ - private def genTypeApply(tree: TypeApply): js.Tree = { + private def genTypeApply(tree: TypeApply): js.Tree = implicit val pos: SourcePosition = tree.sourcePos val TypeApply(fun, targs) = tree @@ -3278,27 +3013,24 @@ class JSCodeGen()(using genCtx: Context) { val genReceiver = genExpr(receiver) - if (sym == defn.Any_asInstanceOf) { + if (sym == defn.Any_asInstanceOf) genAsInstanceOf(genReceiver, to) - } else if (sym == defn.Any_isInstanceOf) { + else if (sym == defn.Any_isInstanceOf) genIsInstanceOf(genReceiver, to) - } else { + else throw new FatalError( s"Unexpected type application $fun with symbol ${sym.fullName}") - } - } /** Gen JS code for a Java Seq literal. */ - private def genJavaSeqLiteral(tree: JavaSeqLiteral): js.Tree = { + private def genJavaSeqLiteral(tree: JavaSeqLiteral): js.Tree = implicit val pos = tree.span val genElems = tree.elems.map(genExpr) val arrayTypeRef = toTypeRef(tree.tpe).asInstanceOf[jstpe.ArrayTypeRef] js.ArrayValue(arrayTypeRef, genElems) - } /** Gen JS code for a switch-`Match`, which is translated into an IR `js.Match`. 
*/ - def genMatch(tree: Tree, isStat: Boolean): js.Tree = { + def genMatch(tree: Tree, isStat: Boolean): js.Tree = implicit val pos = tree.span val Match(selector, cases) = tree: @unchecked @@ -3308,23 +3040,21 @@ class JSCodeGen()(using genCtx: Context) { val genSelector = genExpr(selector) // Sanity check: we can handle Ints and Strings (including `null`s), but nothing else - genSelector.tpe match { + genSelector.tpe match case jstpe.IntType | jstpe.ClassType(jsNames.BoxedStringClass) | jstpe.NullType | jstpe.NothingType => // ok case _ => abortMatch(s"Invalid selector type ${genSelector.tpe}") - } - val resultType = toIRType(tree.tpe) match { + val resultType = toIRType(tree.tpe) match case jstpe.NothingType => jstpe.NothingType // must take priority over NoType below case _ if isStat => jstpe.NoType case resType => resType - } var clauses: List[(List[js.MatchableLiteral], js.Tree)] = Nil var optDefaultClause: Option[js.Tree] = None - for (caze @ CaseDef(pat, guard, body) <- cases) { + for (caze @ CaseDef(pat, guard, body) <- cases) if (guard != EmptyTree) abortMatch("Found a case guard") @@ -3333,33 +3063,27 @@ class JSCodeGen()(using genCtx: Context) { def invalidCase(): Nothing = abortMatch("Invalid case") - def genMatchableLiteral(tree: Literal): js.MatchableLiteral = { - genExpr(tree) match { + def genMatchableLiteral(tree: Literal): js.MatchableLiteral = + genExpr(tree) match case matchableLiteral: js.MatchableLiteral => matchableLiteral case otherExpr => invalidCase() - } - } - pat match { + pat match case lit: Literal => clauses = (List(genMatchableLiteral(lit)), genBody) :: clauses case Ident(nme.WILDCARD) => optDefaultClause = Some(genBody) case Alternative(alts) => - val genAlts = alts.map { + val genAlts = alts.map: case lit: Literal => genMatchableLiteral(lit) case _ => invalidCase() - } clauses = (genAlts, genBody) :: clauses case _ => invalidCase() - } - } clauses = clauses.reverse - val defaultClause = optDefaultClause.getOrElse { + val 
defaultClause = optDefaultClause.getOrElse: throw new AssertionError("No elseClause in pattern match") - } /* Builds a `js.Match`, but simplifies it to a `js.If` if there is only * one case with one alternative, and to a `js.Block` if there is no case @@ -3370,7 +3094,7 @@ class JSCodeGen()(using genCtx: Context) { */ def isInt(tree: js.Tree): Boolean = tree.tpe == jstpe.IntType - clauses match { + clauses match case Nil => // Completely remove the Match. Preserve the side-effects of `genSelector`. js.Block(exprToStat(genSelector), defaultClause) @@ -3388,8 +3112,6 @@ class JSCodeGen()(using genCtx: Context) { case _ => // We have more than one case: use a js.Match js.Match(genSelector, clauses, defaultClause)(resultType) - } - } /** Gen JS code for a closure. * @@ -3432,17 +3154,16 @@ class JSCodeGen()(using genCtx: Context) { * actual arguments at the call-site of the closure, and they are also * available in the `body`. */ - private def genClosure(tree: Closure): js.Tree = { + private def genClosure(tree: Closure): js.Tree = implicit val pos = tree.span val Closure(env, call, functionalInterface) = tree val envSize = env.size - val (fun, args) = call match { + val (fun, args) = call match // case Apply(fun, args) => (fun, args) // Conjectured not to happen case t @ Select(_, _) => (t, Nil) case t @ Ident(_) => (t, Nil) - } val sym = fun.symbol val isStaticCall = isMethodStaticInIR(sym) @@ -3453,11 +3174,10 @@ class JSCodeGen()(using genCtx: Context) { val formalAndActualCaptures = allCaptureValues.map { value => implicit val pos = value.span - val (formalIdent, originalName) = value match { + val (formalIdent, originalName) = value match case Ident(name) => (freshLocalIdent(name.toTermName), OriginalName(name.toString)) case This(_) => (freshLocalIdent("this"), thisOriginalName) case _ => (freshLocalIdent(), NoOriginalName) - } val formalCapture = js.ParamDef(formalIdent, originalName, toIRType(value.tpe), mutable = false) val actualCapture = genExpr(value) @@ 
-3466,13 +3186,11 @@ class JSCodeGen()(using genCtx: Context) { val (formalCaptures, actualCaptures) = formalAndActualCaptures.unzip val funInterfaceSym = functionalInterface.tpe.typeSymbol - val hasRepeatedParam = { - funInterfaceSym.exists && { + val hasRepeatedParam = + funInterfaceSym.exists `&&`: val Seq(samMethodDenot) = funInterfaceSym.info.possibleSamMethods val samMethod = samMethodDenot.symbol atPhase(elimRepeatedPhase)(samMethod.info.paramInfoss.flatten.exists(_.isRepeatedParam)) - } - } val formalParamNames = sym.info.paramNamess.flatten.drop(envSize) val formalParamTypes = sym.info.paramInfoss.flatten.drop(envSize) @@ -3480,7 +3198,7 @@ class JSCodeGen()(using genCtx: Context) { if (hasRepeatedParam) (0 until (formalParamTypes.size - 1)).map(_ => false) :+ true else (0 until formalParamTypes.size).map(_ => false) - val formalAndActualParams = formalParamNames.lazyZip(formalParamTypes).lazyZip(formalParamRepeateds).map { + val formalAndActualParams = formalParamNames.lazyZip(formalParamTypes).lazyZip(formalParamRepeateds).map: (name, tpe, repeated) => val formalParam = js.ParamDef(freshLocalIdent(name), OriginalName(name.toString), jstpe.AnyType, mutable = false) @@ -3488,34 +3206,30 @@ class JSCodeGen()(using genCtx: Context) { if (repeated) genJSArrayToVarArgs(formalParam.ref)(tree.sourcePos) else unbox(formalParam.ref, tpe) (formalParam, actualParam) - } val (formalAndRestParams, actualParams) = formalAndActualParams.unzip val (formalParams, restParam) = if (hasRepeatedParam) (formalAndRestParams.init, Some(formalAndRestParams.last)) else (formalAndRestParams, None) - val genBody = { - val call = if (isStaticCall) { + val genBody = + val call = if (isStaticCall) genApplyStatic(sym, formalCaptures.map(_.ref) ::: actualParams) - } else { + else val thisCaptureRef :: argCaptureRefs = formalCaptures.map(_.ref): @unchecked if (!sym.owner.isNonNativeJSClass || sym.isJSExposed) genApplyMethodMaybeStatically(thisCaptureRef, sym, argCaptureRefs ::: 
actualParams) else genApplyJSClassMethod(thisCaptureRef, sym, argCaptureRefs ::: actualParams) - } box(call, sym.info.finalResultType) - } - val isThisFunction = funInterfaceSym.isSubClass(jsdefn.JSThisFunctionClass) && { + val isThisFunction = funInterfaceSym.isSubClass(jsdefn.JSThisFunctionClass) `&&`: val ok = formalParams.nonEmpty if (!ok) report.error("The SAM or apply method for a js.ThisFunction must have a leading non-varargs parameter", tree) ok - } - if (isThisFunction) { + if (isThisFunction) val thisParam :: otherParams = formalParams: @unchecked js.Closure( arrow = false, @@ -3528,27 +3242,24 @@ class JSCodeGen()(using genCtx: Context) { js.This()(thisParam.ptpe)(thisParam.pos))(thisParam.pos), genBody), actualCaptures) - } else { + else val closure = js.Closure(arrow = true, formalCaptures, formalParams, restParam, genBody, actualCaptures) - if (!funInterfaceSym.exists || defn.isFunctionClass(funInterfaceSym)) { + if (!funInterfaceSym.exists || defn.isFunctionClass(funInterfaceSym)) val formalCount = formalParams.size val cls = ClassName("scala.scalajs.runtime.AnonFunction" + formalCount) val ctorName = MethodName.constructor( jstpe.ClassRef(ClassName("scala.scalajs.js.Function" + formalCount)) :: Nil) js.New(cls, js.MethodIdent(ctorName), List(closure)) - } else if (funInterfaceSym.name == tpnme.FunctionXXL && funInterfaceSym.owner == defn.ScalaRuntimePackageClass) { + else if (funInterfaceSym.name == tpnme.FunctionXXL && funInterfaceSym.owner == defn.ScalaRuntimePackageClass) val cls = ClassName("scala.scalajs.runtime.AnonFunctionXXL") val ctorName = MethodName.constructor( jstpe.ClassRef(ClassName("scala.scalajs.js.Function1")) :: Nil) js.New(cls, js.MethodIdent(ctorName), List(closure)) - } else { + else assert(funInterfaceSym.isJSType, s"Invalid functional interface $funInterfaceSym reached the back-end") closure - } - } - } /** Generates a static method instantiating and calling this * DynamicImportThunk's `apply`: @@ -3559,16 +3270,15 @@ class 
JSCodeGen()(using genCtx: Context) { * } * }}} */ - private def genDynamicImportForwarder(clsSym: Symbol)(using Position): js.MethodDef = { - withNewLocalNameScope { + private def genDynamicImportForwarder(clsSym: Symbol)(using Position): js.MethodDef = + withNewLocalNameScope: val ctor = clsSym.primaryConstructor val paramSyms = ctor.paramSymss.flatten val paramDefs = paramSyms.map(genParamDef(_)) - val body = { + val body = val inst = js.New(encodeClassName(clsSym), encodeMethodSym(ctor), paramDefs.map(_.ref)) genApplyMethod(inst, jsdefn.DynamicImportThunkClass_apply, Nil) - } js.MethodDef( js.MemberFlags.empty.withNamespace(js.MemberNamespace.PublicStatic), @@ -3577,8 +3287,6 @@ class JSCodeGen()(using genCtx: Context) { paramDefs, jstpe.AnyType, Some(body))(OptimizerHints.empty, None) - } - } /** Boxes a value of the given type before `elimErasedValueType`. * @@ -3590,8 +3298,8 @@ class JSCodeGen()(using genCtx: Context) { * @param tpeEnteringElimErasedValueType The type of `expr` as it was * entering the `elimErasedValueType` phase. */ - def box(expr: js.Tree, tpeEnteringElimErasedValueType: Type)(implicit pos: Position): js.Tree = { - tpeEnteringElimErasedValueType match { + def box(expr: js.Tree, tpeEnteringElimErasedValueType: Type)(implicit pos: Position): js.Tree = + tpeEnteringElimErasedValueType match case tpe if isPrimitiveValueType(tpe) => makePrimitiveBox(expr, tpe) @@ -3602,8 +3310,6 @@ class JSCodeGen()(using genCtx: Context) { case _ => expr - } - } /** Unboxes a value typed as Any to the given type before `elimErasedValueType`. * @@ -3615,8 +3321,8 @@ class JSCodeGen()(using genCtx: Context) { * @param tpeEnteringElimErasedValueType The type of `expr` as it was * entering the `elimErasedValueType` phase. 
*/ - def unbox(expr: js.Tree, tpeEnteringElimErasedValueType: Type)(implicit pos: Position): js.Tree = { - tpeEnteringElimErasedValueType match { + def unbox(expr: js.Tree, tpeEnteringElimErasedValueType: Type)(implicit pos: Position): js.Tree = + tpeEnteringElimErasedValueType match case tpe if isPrimitiveValueType(tpe) => makePrimitiveUnbox(expr, tpe) @@ -3632,16 +3338,14 @@ class JSCodeGen()(using genCtx: Context) { case tpe => genAsInstanceOf(expr, tpe) - } - } /** Gen JS code for an asInstanceOf cast (for reference types only) */ private def genAsInstanceOf(value: js.Tree, to: Type)(implicit pos: Position): js.Tree = genAsInstanceOf(value, toIRType(to)) /** Gen JS code for an asInstanceOf cast (for reference types only) */ - private def genAsInstanceOf(value: js.Tree, to: jstpe.Type)(implicit pos: Position): js.Tree = { - to match { + private def genAsInstanceOf(value: js.Tree, to: jstpe.Type)(implicit pos: Position): js.Tree = + to match case jstpe.AnyType => value case jstpe.NullType => @@ -3654,93 +3358,81 @@ class JSCodeGen()(using genCtx: Context) { js.Block(value, genThrowClassCastException()) case _ => js.AsInstanceOf(value, to) - } - } - private def genThrowClassCastException()(implicit pos: Position): js.Tree = { + private def genThrowClassCastException()(implicit pos: Position): js.Tree = js.Throw(js.New(jsNames.ClassCastExceptionClass, js.MethodIdent(jsNames.NoArgConstructorName), Nil)) - } /** Gen JS code for an isInstanceOf test (for reference types only) */ def genIsInstanceOf(value: js.Tree, to: Type)( - implicit pos: SourcePosition): js.Tree = { + implicit pos: SourcePosition): js.Tree = val sym = to.typeSymbol - if (sym == defn.ObjectClass) { + if (sym == defn.ObjectClass) js.BinaryOp(js.BinaryOp.!==, value, js.Null()) - } else if (sym.isJSType) { - if (sym.is(Trait)) { + else if (sym.isJSType) + if (sym.is(Trait)) report.error( em"isInstanceOf[${sym.fullName}] not supported because it is a JS trait", pos) js.BooleanLiteral(true) - } else { + 
else js.AsInstanceOf(js.JSBinaryOp( js.JSBinaryOp.instanceof, value, genLoadJSConstructor(sym)), jstpe.BooleanType) - } - } else { + else // The Scala type system prevents x.isInstanceOf[Null] and ...[Nothing] assert(sym != defn.NullClass && sym != defn.NothingClass, s"Found a .isInstanceOf[$sym] at $pos") js.IsInstanceOf(value, toIRType(to)) - } - } /** Gen a statically linked call to an instance method. */ def genApplyMethodMaybeStatically(receiver: js.Tree, method: Symbol, - arguments: List[js.Tree])(implicit pos: Position): js.Tree = { + arguments: List[js.Tree])(implicit pos: Position): js.Tree = if (method.isPrivate || method.isClassConstructor) genApplyMethodStatically(receiver, method, arguments) else genApplyMethod(receiver, method, arguments) - } /** Gen a dynamically linked call to a Scala method. */ def genApplyMethod(receiver: js.Tree, method: Symbol, arguments: List[js.Tree])( - implicit pos: Position): js.Tree = { + implicit pos: Position): js.Tree = assert(!method.isPrivate, s"Cannot generate a dynamic call to private method $method at $pos") js.Apply(js.ApplyFlags.empty, receiver, encodeMethodSym(method), arguments)( toIRType(patchedResultType(method))) - } /** Gen a statically linked call to an instance method. */ def genApplyMethodStatically(receiver: js.Tree, method: Symbol, arguments: List[js.Tree])( - implicit pos: Position): js.Tree = { + implicit pos: Position): js.Tree = val flags = js.ApplyFlags.empty .withPrivate(method.isPrivate && !method.isClassConstructor) .withConstructor(method.isClassConstructor) js.ApplyStatically(flags, receiver, encodeClassName(method.owner), encodeMethodSym(method), arguments)( toIRType(patchedResultType(method))) - } /** Gen a call to a static method. 
*/ private def genApplyStatic(method: Symbol, arguments: List[js.Tree])( - implicit pos: Position): js.Tree = { + implicit pos: Position): js.Tree = js.ApplyStatic(js.ApplyFlags.empty.withPrivate(method.isPrivate), encodeClassName(method.owner), encodeMethodSym(method), arguments)( toIRType(patchedResultType(method))) - } /** Gen a call to a non-exposed method of a non-native JS class. */ def genApplyJSClassMethod(receiver: js.Tree, method: Symbol, arguments: List[js.Tree])( - implicit pos: Position): js.Tree = { + implicit pos: Position): js.Tree = genApplyStatic(method, receiver :: arguments) - } /** Gen a call to a method of a Scala top-level module. */ private def genModuleApplyMethod(methodSym: Symbol, arguments: List[js.Tree])( - implicit pos: SourcePosition): js.Tree = { + implicit pos: SourcePosition): js.Tree = genApplyMethod(genLoadModule(methodSym.owner), methodSym, arguments) - } /** Gen a boxing operation (tpe is the primitive type) */ private def makePrimitiveBox(expr: js.Tree, tpe: Type)( - implicit pos: Position): js.Tree = { - toIRType(tpe) match { + implicit pos: Position): js.Tree = + toIRType(tpe) match case jstpe.NoType => // for JS interop cases js.Block(expr, js.Undefined()) case jstpe.BooleanType | jstpe.CharType | jstpe.ByteType | @@ -3750,53 +3442,46 @@ class JSCodeGen()(using genCtx: Context) { case typeRef => throw new FatalError( s"makePrimitiveBox requires a primitive type, found $typeRef for $tpe at $pos") - } - } /** Gen an unboxing operation (tpe is the primitive type) */ private def makePrimitiveUnbox(expr: js.Tree, tpe: Type)( - implicit pos: Position): js.Tree = { - toIRType(tpe) match { + implicit pos: Position): js.Tree = + toIRType(tpe) match case jstpe.NoType => expr // for JS interop cases case irTpe => js.AsInstanceOf(expr, irTpe) - } - } /** Gen JS code for a Scala.js-specific primitive method */ private def genJSPrimitive(tree: Apply, args: List[Tree], code: Int, - isStat: Boolean): js.Tree = { + isStat: Boolean): js.Tree 
= import JSPrimitives._ implicit val pos = tree.span - def genArgs1: js.Tree = { + def genArgs1: js.Tree = assert(args.size == 1, s"Expected exactly 1 argument for JS primitive $code but got " + s"${args.size} at $pos") genExpr(args.head) - } - def genArgs2: (js.Tree, js.Tree) = { + def genArgs2: (js.Tree, js.Tree) = assert(args.size == 2, s"Expected exactly 2 arguments for JS primitive $code but got " + s"${args.size} at $pos") (genExpr(args.head), genExpr(args.tail.head)) - } def genArgsVarLength: List[js.TreeOrJSSpread] = genActualJSArgs(tree.symbol, args) - def resolveReifiedJSClassSym(arg: Tree): Symbol = { - def fail(): Symbol = { + def resolveReifiedJSClassSym(arg: Tree): Symbol = + def fail(): Symbol = report.error( tree.symbol.name.toString + " must be called with a constant " + "classOf[T] representing a class extending js.Any " + "(not a trait nor an object)", tree.sourcePos) NoSymbol - } - arg match { + arg match case Literal(value) if value.tag == Constants.ClazzTag => val classSym = value.typeValue.typeSymbol if (classSym.isJSType && !classSym.is(Trait) && !classSym.is(ModuleClass)) @@ -3805,10 +3490,8 @@ class JSCodeGen()(using genCtx: Context) { fail() case _ => fail() - } - } - (code: @switch) match { + (code: @switch) match case DYNNEW => // js.Dynamic.newInstance(clazz)(actualArgs: _*) val (jsClass, actualArgs) = extractFirstArg(genArgsVarLength) @@ -3831,11 +3514,11 @@ class JSCodeGen()(using genCtx: Context) { // runtime.createLocalJSClass(clazz, superClass, fakeNewInstances) val classSym = resolveReifiedJSClassSym(args(0)) val superClassValue = genExpr(args(1)) - if (classSym == NoSymbol) { + if (classSym == NoSymbol) js.Undefined() // compile error emitted by resolveReifiedJSClassSym - } else { - val captureValues = { - if (code == CREATE_INNER_JS_CLASS) { + else + val captureValues = + if (code == CREATE_INNER_JS_CLASS) /* Private inner classes that do not actually access their outer * pointer do not receive an outer argument. 
We therefore count * the number of constructors that have non-empty param list to @@ -3845,22 +3528,18 @@ class JSCodeGen()(using genCtx: Context) { classSym.info.decls.lookupAll(nme.CONSTRUCTOR).count(_.info.paramInfoss.head.nonEmpty) val outer = genThis() List.fill(requiredThisParams)(outer) - } else { + else val fakeNewInstances = args(2).asInstanceOf[JavaSeqLiteral].elems fakeNewInstances.flatMap(genCaptureValuesFromFakeNewInstance(_)) - } - } js.CreateJSClass(encodeClassName(classSym), superClassValue :: captureValues) - } case WITH_CONTEXTUAL_JS_CLASS_VALUE => // withContextualJSClassValue(jsclass, inner) val jsClassValue = genExpr(args(0)) withScopedVars( contextualJSClassValue := Some(jsClassValue) - ) { + ): genStatOrExpr(args(1), isStat) - } case LINKING_INFO => // runtime.linkingInfo @@ -3877,14 +3556,13 @@ class JSCodeGen()(using genCtx: Context) { case JS_NEW_TARGET => // js.new.target val valid = currentMethodSym.get.isClassConstructor && currentClassSym.isNonNativeJSClass - if (!valid) { + if (!valid) report.error( "Illegal use of js.`new`.target.\n" + "It can only be used in the constructor of a JS class, " + "as a statement or in the rhs of a val or var.\n" + "It cannot be used inside a lambda or by-name parameter, nor in any other location.", tree.sourcePos) - } js.JSNewTarget() case JS_IMPORT => @@ -3902,7 +3580,7 @@ class JSCodeGen()(using genCtx: Context) { s"Expected exactly 1 argument for JS primitive $code but got " + s"${args.size} at $pos") - args.head match { + args.head match case Block(stats, expr @ Typed(Apply(fun @ Select(New(tpt), _), args), _)) => /* stats is always empty if no other compiler plugin is present. 
* However, code instrumentation (notably scoverage) might add @@ -3934,7 +3612,6 @@ class JSCodeGen()(using genCtx: Context) { case tree => throw new FatalError( s"Unexpected argument tree in dynamicImport: $tree/${tree.getClass} at: $pos") - } case JS_NATIVE => // js.native @@ -3946,10 +3623,9 @@ class JSCodeGen()(using genCtx: Context) { case TYPEOF => // js.typeOf(arg) val arg = genArgs1 - val typeofExpr = arg match { + val typeofExpr = arg match case arg: js.JSGlobalRef => js.JSTypeOfGlobalRef(arg) case _ => js.JSUnaryOp(js.JSUnaryOp.typeof, arg) - } js.AsInstanceOf(typeofExpr, jstpe.ClassType(jsNames.BoxedStringClass)) case STRICT_EQ => @@ -4071,8 +3747,6 @@ class JSCodeGen()(using genCtx: Context) { case REFLECT_SELECTABLE_APPLYDYN => // scala.reflect.Selectable.applyDynamic genReflectiveCall(tree, isSelectDynamic = false) - } - } /** Gen the SJSIR for a reflective call. * @@ -4159,7 +3833,7 @@ class JSCodeGen()(using genCtx: Context) { * Virtually all of the code in `genReflectiveCall` deals with recovering * those elements. Constructing the IR Tree is the easy part after that. 
*/ - private def genReflectiveCall(tree: Apply, isSelectDynamic: Boolean): js.Tree = { + private def genReflectiveCall(tree: Apply, isSelectDynamic: Boolean): js.Tree = implicit val pos = tree.span val Apply(fun @ Select(receiver, _), args) = tree: @unchecked @@ -4167,7 +3841,7 @@ class JSCodeGen()(using genCtx: Context) { js.MethodIdent(selectedValueMethodName), Nil)(jstpe.AnyType) // Extract the method name as a String - val methodNameStr = args.head match { + val methodNameStr = args.head match case Literal(Constants.Constant(name: String)) => name case _ => @@ -4177,16 +3851,15 @@ class JSCodeGen()(using genCtx: Context) { "Other uses are not supported in Scala.js.", args.head.sourcePos) "erroneous" - } - val (formalParamTypeRefs, actualArgs) = if (isSelectDynamic) { + val (formalParamTypeRefs, actualArgs) = if (isSelectDynamic) (Nil, Nil) - } else { + else // Extract the param type refs and actual args from the 2nd and 3rd argument to applyDynamic - args.tail match { + args.tail match case WrapArray(classOfsArray: JavaSeqLiteral) :: WrapArray(actualArgsAnyArray: JavaSeqLiteral) :: Nil => // Extract jstpe.Type's and jstpe.TypeRef's from the classOf[_] trees - val formalParamTypesAndTypeRefs = classOfsArray.elems.map { + val formalParamTypesAndTypeRefs = classOfsArray.elems.map: // classOf[tp] -> tp case Literal(const) if const.tag == Constants.ClazzTag => toIRTypeAndTypeRef(const.typeValue) @@ -4198,14 +3871,12 @@ class JSCodeGen()(using genCtx: Context) { "Other uses are not supported in Scala.js.", otherTree.sourcePos) (jstpe.AnyType, jstpe.ClassRef(jsNames.ObjectClass)) - } // Gen the actual args, downcasting them to the formal param types - val actualArgs = actualArgsAnyArray.elems.zip(formalParamTypesAndTypeRefs).map { + val actualArgs = actualArgsAnyArray.elems.zip(formalParamTypesAndTypeRefs).map: (actualArgAny, formalParamTypeAndTypeRef) => val genActualArgAny = genExpr(actualArgAny) genAsInstanceOf(genActualArgAny, 
formalParamTypeAndTypeRef._1)(genActualArgAny.pos) - } (formalParamTypesAndTypeRefs.map(pair => toParamOrResultTypeRef(pair._2)), actualArgs) @@ -4215,13 +3886,10 @@ class JSCodeGen()(using genCtx: Context) { "is not supported in Scala.js.", tree.sourcePos) (Nil, Nil) - } - } val methodName = MethodName.reflectiveProxy(methodNameStr, formalParamTypeRefs) js.Apply(js.ApplyFlags.empty, selectedValueTree, js.MethodIdent(methodName), actualArgs)(jstpe.AnyType) - } /** Gen actual actual arguments to Scala method call. * Returns a list of the transformed arguments. @@ -4230,7 +3898,7 @@ class JSCodeGen()(using genCtx: Context) { * into js.WrappedArray instead of Scala wrapped arrays. */ private def genActualArgs(sym: Symbol, args: List[Tree])( - implicit pos: Position): List[js.Tree] = { + implicit pos: Position): List[js.Tree] = args.map(genExpr) /*val wereRepeated = exitingPhase(currentRun.typerPhase) { sym.tpe.params.map(p => isScalaRepeatedParamType(p.tpe)) @@ -4257,7 +3925,6 @@ class JSCodeGen()(using genCtx: Context) { } } }*/ - } /** Gen actual actual arguments to a JS method call. * Returns a list of the transformed arguments. @@ -4271,28 +3938,25 @@ class JSCodeGen()(using genCtx: Context) { * wrapped in a [[js.JSSpread]] node to be expanded at runtime. 
*/ private def genActualJSArgs(sym: Symbol, args: List[Tree])( - implicit pos: Position): List[js.TreeOrJSSpread] = { + implicit pos: Position): List[js.TreeOrJSSpread] = var reversedArgs: List[js.TreeOrJSSpread] = Nil - for ((arg, info) <- args.zip(sym.jsParamInfos)) { - if (info.repeated) { + for ((arg, info) <- args.zip(sym.jsParamInfos)) + if (info.repeated) reversedArgs = genJSRepeatedParam(arg) reverse_::: reversedArgs - } else if (info.capture) { + else if (info.capture) // Ignore captures assert(sym.isClassConstructor, i"Found a capture param in method ${sym.fullName}, which is not a class constructor, at $pos") - } else { + else val unboxedArg = genExpr(arg) - val boxedArg = unboxedArg match { + val boxedArg = unboxedArg match case js.Transient(UndefinedParam) => unboxedArg case _ => box(unboxedArg, info.info) - } reversedArgs ::= boxedArg - } - } /* Remove all consecutive UndefinedParam's at the end of the argument * list. No check is performed whether they may be there, since they will @@ -4303,13 +3967,11 @@ class JSCodeGen()(using genCtx: Context) { /* Find remaining UndefinedParam and replace by js.Undefined. This can * happen with named arguments or with multiple argument lists. */ - reversedArgs = reversedArgs map { + reversedArgs = reversedArgs map: case js.Transient(UndefinedParam) => js.Undefined() case arg => arg - } reversedArgs.reverse - } /** Gen JS code for a repeated param of a JS method. * @@ -4324,8 +3986,8 @@ class JSCodeGen()(using genCtx: Context) { * Otherwise, it returns a `JSSpread` with the `Seq` converted to a * `js.Array`. */ - private def genJSRepeatedParam(arg: Tree): List[js.TreeOrJSSpread] = { - tryGenRepeatedParamAsJSArray(arg, handleNil = true).getOrElse { + private def genJSRepeatedParam(arg: Tree): List[js.TreeOrJSSpread] = + tryGenRepeatedParamAsJSArray(arg, handleNil = true).getOrElse: /* Fall back to calling runtime.genTraversableOnce2jsArray * to perform the conversion to js.Array, then wrap in a Spread * operator. 
@@ -4335,8 +3997,6 @@ class JSCodeGen()(using genCtx: Context) { jsdefn.Runtime_toJSVarArgs, List(genExpr(arg))) List(js.JSSpread(jsArrayArg)) - } - } /** Try and expand an actual argument to a repeated param `(xs: T*)`. * @@ -4346,11 +4006,11 @@ class JSCodeGen()(using genCtx: Context) { * method returns `None`. */ private def tryGenRepeatedParamAsJSArray(arg: Tree, - handleNil: Boolean): Option[List[js.Tree]] = { + handleNil: Boolean): Option[List[js.Tree]] = implicit val pos = arg.span // Given a method `def foo(args: T*)` - arg match { + arg match // foo(arg1, arg2, ..., argN) where N > 0 case MaybeAsInstanceOf(WrapArray(MaybeAsInstanceOf(array: JavaSeqLiteral))) => /* Value classes in arrays are already boxed, so no need to use @@ -4366,42 +4026,35 @@ class JSCodeGen()(using genCtx: Context) { // foo(argSeq: _*) - cannot be optimized case _ => None - } - } - private object MaybeAsInstanceOf { - def unapply(tree: Tree): Some[Tree] = tree match { + private object MaybeAsInstanceOf: + def unapply(tree: Tree): Some[Tree] = tree match case TypeApply(asInstanceOf_? @ Select(base, _), _) if asInstanceOf_?.symbol == defn.Any_asInstanceOf => Some(base) case _ => Some(tree) - } - } - private object WrapArray { - lazy val isWrapArray: Set[Symbol] = { + private object WrapArray: + lazy val isWrapArray: Set[Symbol] = val names0 = defn.ScalaValueClasses().map(sym => nme.wrapXArray(sym.name)) val names1 = names0 ++ Set(nme.wrapRefArray, nme.genericWrapArray) val symsInPredef = names1.map(defn.ScalaPredefModule.requiredMethod(_)) val symsInScalaRunTime = names1.map(defn.ScalaRuntimeModule.requiredMethod(_)) (symsInPredef ++ symsInScalaRunTime).toSet - } - def unapply(tree: Apply): Option[Tree] = tree match { + def unapply(tree: Apply): Option[Tree] = tree match case Apply(wrapArray_?, List(wrapped)) if isWrapArray(wrapArray_?.symbol) => Some(wrapped) case _ => None - } - } /** Wraps a `js.Array` to use as varargs. 
*/ def genJSArrayToVarArgs(arrayRef: js.Tree)(implicit pos: SourcePosition): js.Tree = genModuleApplyMethod(jsdefn.Runtime_toScalaVarArgs, List(arrayRef)) /** Gen the actual capture values for a JS constructor based on its fake `new` invocation. */ - private def genCaptureValuesFromFakeNewInstance(tree: Tree): List[js.Tree] = { + private def genCaptureValuesFromFakeNewInstance(tree: Tree): List[js.Tree] = implicit val pos: Position = tree.span val Apply(fun @ Select(New(_), _), args) = tree: @unchecked @@ -4411,48 +4064,43 @@ class JSCodeGen()(using genCtx: Context) { * introduced by explicitouter or lambdalift (but reversed, of course). */ - val existedBeforeUncurry = atPhase(elimRepeatedPhase) { + val existedBeforeUncurry = atPhase(elimRepeatedPhase): sym.info.paramNamess.flatten.toSet - } for { (arg, paramName) <- args.zip(sym.info.paramNamess.flatten) if !existedBeforeUncurry(paramName) - } yield { + } yield genExpr(arg) - } - } private def genVarRef(sym: Symbol)(implicit pos: Position): js.VarRef = js.VarRef(encodeLocalSym(sym))(toIRType(sym.info)) - private def genAssignableField(sym: Symbol, qualifier: Tree)(implicit pos: SourcePosition): (js.AssignLhs, Boolean) = { + private def genAssignableField(sym: Symbol, qualifier: Tree)(implicit pos: SourcePosition): (js.AssignLhs, Boolean) = def qual = genExpr(qualifier) - if (sym.owner.isNonNativeJSClass) { - val f = if (sym.isJSExposed) { + if (sym.owner.isNonNativeJSClass) + val f = if (sym.isJSExposed) js.JSSelect(qual, genExpr(sym.jsName)) - } else if (sym.owner.isAnonymousClass) { + else if (sym.owner.isAnonymousClass) js.JSSelect( js.JSSelect(qual, genPrivateFieldsSymbol()), encodeFieldSymAsStringLiteral(sym)) - } else { + else js.JSPrivateSelect(qual, encodeClassName(sym.owner), encodeFieldSym(sym)) - } (f, true) - } else if (sym.hasAnnotation(jsdefn.JSExportTopLevelAnnot)) { + else if (sym.hasAnnotation(jsdefn.JSExportTopLevelAnnot)) val f = js.SelectStatic(encodeClassName(sym.owner), 
encodeFieldSym(sym))(jstpe.AnyType) (f, true) - } else if (sym.hasAnnotation(jsdefn.JSExportStaticAnnot)) { - val jsName = sym.getAnnotation(jsdefn.JSExportStaticAnnot).get.argumentConstantString(0).getOrElse { + else if (sym.hasAnnotation(jsdefn.JSExportStaticAnnot)) + val jsName = sym.getAnnotation(jsdefn.JSExportStaticAnnot).get.argumentConstantString(0).getOrElse: sym.defaultJSName - } val companionClass = sym.owner.linkedClass val f = js.JSSelect(genLoadJSConstructor(companionClass), js.StringLiteral(jsName)) (f, true) - } else { + else val className = encodeClassName(sym.owner) val fieldIdent = encodeFieldSym(sym) @@ -4474,28 +4122,24 @@ class JSCodeGen()(using genCtx: Context) { js.Select(qual, className, fieldIdent)(irType) (f, boxed) - } - } /** Gen JS code for loading a Java static field. */ - private def genLoadStaticField(sym: Symbol)(implicit pos: SourcePosition): js.Tree = { + private def genLoadStaticField(sym: Symbol)(implicit pos: SourcePosition): js.Tree = /* Actually, there is no static member in Scala.js. If we come here, that * is because we found the symbol in a Java-emitted .class in the * classpath. But the corresponding implementation in Scala.js will * actually be a val in the companion module. */ - if (sym == defn.BoxedUnit_UNIT) { + if (sym == defn.BoxedUnit_UNIT) js.Undefined() - } else if (sym == defn.BoxedUnit_TYPE) { + else if (sym == defn.BoxedUnit_TYPE) js.ClassOf(jstpe.VoidRef) - } else { + else val className = encodeClassName(sym.owner) val method = encodeStaticMemberSym(sym) js.ApplyStatic(js.ApplyFlags.empty, className, method, Nil)(toIRType(sym.info)) - } - } /** Generates a call to `runtime.privateFieldsSymbol()` */ private def genPrivateFieldsSymbol()(implicit pos: SourcePosition): js.Tree = @@ -4521,63 +4165,56 @@ class JSCodeGen()(using genCtx: Context) { * refers to the global scope. 
*/ def genLoadModuleOrGlobalScope(sym0: Symbol)( - implicit pos: SourcePosition): MaybeGlobalScope = { + implicit pos: SourcePosition): MaybeGlobalScope = require(sym0.is(Module), "genLoadModule called with non-module symbol: " + sym0) val sym = if (sym0.isTerm) sym0.moduleClass else sym0 // Does that module refer to the global scope? - if (sym.hasAnnotation(jsdefn.JSGlobalScopeAnnot)) { + if (sym.hasAnnotation(jsdefn.JSGlobalScopeAnnot)) MaybeGlobalScope.GlobalScope(pos) - } else { + else val cls = encodeClassName(sym) val tree = if (sym.isJSType) js.LoadJSModule(cls) else js.LoadModule(cls) MaybeGlobalScope.NotGlobalScope(tree) - } - } /** Gen JS code representing the constructor of a JS class. */ private def genLoadJSConstructor(sym: Symbol)( - implicit pos: Position): js.Tree = { + implicit pos: Position): js.Tree = assert(!isStaticModule(sym) && !sym.is(Trait), s"genLoadJSConstructor called with non-class $sym") js.LoadJSConstructor(encodeClassName(sym)) - } - private inline val GenericGlobalObjectInformationMsg = { + private inline val GenericGlobalObjectInformationMsg = "\n " + "See https://www.scala-js.org/doc/interoperability/global-scope.html " + "for further information." - } /** Rule out the `GlobalScope` case of a `MaybeGlobalScope` and extract the * value tree. * * If `tree` represents the global scope, report a compile error. */ - private def ruleOutGlobalScope(tree: MaybeGlobalScope): js.Tree = { - tree match { + private def ruleOutGlobalScope(tree: MaybeGlobalScope): js.Tree = + tree match case MaybeGlobalScope.NotGlobalScope(t) => t case MaybeGlobalScope.GlobalScope(pos) => reportErrorLoadGlobalScope()(pos) - } - } /** Report a compile error specifying that the global scope cannot be * loaded as a value. 
*/ - private def reportErrorLoadGlobalScope()(implicit pos: SourcePosition): js.Tree = { + private def reportErrorLoadGlobalScope()(implicit pos: SourcePosition): js.Tree = report.error( "Loading the global scope as a value (anywhere but as the " + "left-hand-side of a `.`-selection) is not allowed." + GenericGlobalObjectInformationMsg, pos) js.Undefined() - } /** Gen a JS bracket select or a `JSGlobalRef`. * @@ -4590,31 +4227,30 @@ class JSCodeGen()(using genCtx: Context) { * Otherwise, report a compile error. */ private def genJSSelectOrGlobalRef(qual: MaybeGlobalScope, item: js.Tree)( - implicit pos: SourcePosition): js.AssignLhs = { - qual match { + implicit pos: SourcePosition): js.AssignLhs = + qual match case MaybeGlobalScope.NotGlobalScope(qualTree) => js.JSSelect(qualTree, item) case MaybeGlobalScope.GlobalScope(_) => - item match { + item match case js.StringLiteral(value) => - if (js.JSGlobalRef.isValidJSGlobalRefName(value)) { + if (js.JSGlobalRef.isValidJSGlobalRefName(value)) js.JSGlobalRef(value) - } else if (js.JSGlobalRef.ReservedJSIdentifierNames.contains(value)) { + else if (js.JSGlobalRef.ReservedJSIdentifierNames.contains(value)) report.error( "Invalid selection in the global scope of the reserved " + s"identifier name `$value`." + GenericGlobalObjectInformationMsg, pos) js.JSGlobalRef("erroneous") - } else { + else report.error( "Selecting a field of the global scope whose name is " + "not a valid JavaScript identifier is not allowed." + GenericGlobalObjectInformationMsg, pos) js.JSGlobalRef("erroneous") - } case _ => report.error( @@ -4623,9 +4259,6 @@ class JSCodeGen()(using genCtx: Context) { GenericGlobalObjectInformationMsg, pos) js.JSGlobalRef("erroneous") - } - } - } /** Gen a JS bracket method apply or an apply of a `GlobalRef`. 
* @@ -4639,31 +4272,30 @@ class JSCodeGen()(using genCtx: Context) { */ private def genJSMethodApplyOrGlobalRefApply( receiver: MaybeGlobalScope, method: js.Tree, args: List[js.TreeOrJSSpread])( - implicit pos: SourcePosition): js.Tree = { - receiver match { + implicit pos: SourcePosition): js.Tree = + receiver match case MaybeGlobalScope.NotGlobalScope(receiverTree) => js.JSMethodApply(receiverTree, method, args) case MaybeGlobalScope.GlobalScope(_) => - method match { + method match case js.StringLiteral(value) => - if (js.JSGlobalRef.isValidJSGlobalRefName(value)) { + if (js.JSGlobalRef.isValidJSGlobalRefName(value)) js.JSFunctionApply(js.JSGlobalRef(value), args) - } else if (js.JSGlobalRef.ReservedJSIdentifierNames.contains(value)) { + else if (js.JSGlobalRef.ReservedJSIdentifierNames.contains(value)) report.error( "Invalid call in the global scope of the reserved " + s"identifier name `$value`." + GenericGlobalObjectInformationMsg, pos) js.Undefined() - } else { + else report.error( "Calling a method of the global scope whose name is not " + "a valid JavaScript identifier is not allowed." 
+ GenericGlobalObjectInformationMsg, pos) js.Undefined() - } case _ => report.error( @@ -4672,30 +4304,22 @@ class JSCodeGen()(using genCtx: Context) { GenericGlobalObjectInformationMsg, pos) js.Undefined() - } - } - } - private def computeJSNativeLoadSpecOfValDef(sym: Symbol): js.JSNativeLoadSpec = { - atPhaseBeforeTransforms { + private def computeJSNativeLoadSpecOfValDef(sym: Symbol): js.JSNativeLoadSpec = + atPhaseBeforeTransforms: computeJSNativeLoadSpecOfInPhase(sym) - } - } - private def computeJSNativeLoadSpecOfClass(sym: Symbol): Option[js.JSNativeLoadSpec] = { - if (sym.is(Trait) || sym.hasAnnotation(jsdefn.JSGlobalScopeAnnot)) { + private def computeJSNativeLoadSpecOfClass(sym: Symbol): Option[js.JSNativeLoadSpec] = + if (sym.is(Trait) || sym.hasAnnotation(jsdefn.JSGlobalScopeAnnot)) None - } else { - atPhaseBeforeTransforms { + else + atPhaseBeforeTransforms: if (sym.owner.isStaticOwner) Some(computeJSNativeLoadSpecOfInPhase(sym)) else None - } - } - } - private def computeJSNativeLoadSpecOfInPhase(sym: Symbol)(using Context): js.JSNativeLoadSpec = { + private def computeJSNativeLoadSpecOfInPhase(sym: Symbol)(using Context): js.JSNativeLoadSpec = import js.JSNativeLoadSpec._ val symOwner = sym.owner @@ -4704,17 +4328,16 @@ class JSCodeGen()(using genCtx: Context) { def unexpected(msg: String): Nothing = throw new FatalError(i"$msg for ${sym.fullName} at ${sym.srcPos}") - if (symOwner.hasAnnotation(jsdefn.JSNativeAnnot)) { - val jsName = sym.jsName match { + if (symOwner.hasAnnotation(jsdefn.JSNativeAnnot)) + val jsName = sym.jsName match case JSName.Literal(jsName) => jsName case JSName.Computed(_) => unexpected("could not read the simple JS name as a string literal") - } - if (symOwner.hasAnnotation(jsdefn.JSGlobalScopeAnnot)) { + if (symOwner.hasAnnotation(jsdefn.JSGlobalScopeAnnot)) Global(jsName, Nil) - } else { + else val ownerLoadSpec = computeJSNativeLoadSpecOfInPhase(symOwner) - ownerLoadSpec match { + ownerLoadSpec match case Global(globalRef, 
path) => Global(globalRef, path :+ jsName) case Import(module, path) => @@ -4723,32 +4346,26 @@ class JSCodeGen()(using genCtx: Context) { ImportWithGlobalFallback( Import(module, modulePath :+ jsName), Global(globalRef, globalPath :+ jsName)) - } - } - } else { + else def parsePath(pathName: String): List[String] = pathName.split('.').toList - def parseGlobalPath(pathName: String): Global = { + def parseGlobalPath(pathName: String): Global = val globalRef :: path = parsePath(pathName): @unchecked Global(globalRef, path) - } val annot = sym.annotations.find { annot => annot.symbol == jsdefn.JSGlobalAnnot || annot.symbol == jsdefn.JSImportAnnot - }.getOrElse { + }.getOrElse: unexpected("could not find the JS native load spec annotation") - } - if (annot.symbol == jsdefn.JSGlobalAnnot) { - val pathName = annot.argumentConstantString(0).getOrElse { + if (annot.symbol == jsdefn.JSGlobalAnnot) + val pathName = annot.argumentConstantString(0).getOrElse: sym.defaultJSName - } parseGlobalPath(pathName) - } else { // annot.symbol == jsdefn.JSImportAnnot - val module = annot.argumentConstantString(0).getOrElse { + else // annot.symbol == jsdefn.JSImportAnnot + val module = annot.argumentConstantString(0).getOrElse: unexpected("could not read the module argument as a string literal") - } val path = annot.argumentConstantString(1).fold { if (annot.arguments.sizeIs < 2) parsePath(sym.defaultJSName) @@ -4763,9 +4380,6 @@ class JSCodeGen()(using genCtx: Context) { } { globalPathName => ImportWithGlobalFallback(importSpec, parseGlobalPath(globalPathName)) } - } - } - } private def isMethodStaticInIR(sym: Symbol): Boolean = sym.is(JavaStatic) @@ -4777,15 +4391,13 @@ class JSCodeGen()(using genCtx: Context) { private def isStaticModule(sym: Symbol): Boolean = sym.is(Module) && sym.isStatic - private def isPrimitiveValueType(tpe: Type): Boolean = { - tpe.widenDealias match { + private def isPrimitiveValueType(tpe: Type): Boolean = + tpe.widenDealias match case JavaArrayType(_) => 
false case _: ErasedValueType => false case t => t.typeSymbol.asClass.isPrimitiveValueClass - } - } - protected lazy val isHijackedClass: Set[Symbol] = { + protected lazy val isHijackedClass: Set[Symbol] = /* This list is a duplicate of ir.Definitions.HijackedClasses, but * with global.Symbol's instead of IR encoded names as Strings. * We also add java.lang.Void, which BoxedUnit "erases" to. @@ -4795,39 +4407,33 @@ class JSCodeGen()(using genCtx: Context) { defn.BoxedShortClass, defn.BoxedIntClass, defn.BoxedLongClass, defn.BoxedFloatClass, defn.BoxedDoubleClass, defn.StringClass, jsdefn.JavaLangVoidClass ) - } private def isMaybeJavaScriptException(tpe: Type): Boolean = jsdefn.JavaScriptExceptionClass.isSubClass(tpe.typeSymbol) - private def hasDefaultCtorArgsAndJSModule(classSym: Symbol): Boolean = { + private def hasDefaultCtorArgsAndJSModule(classSym: Symbol): Boolean = def hasNativeCompanion = classSym.companionModule.moduleClass.hasAnnotation(jsdefn.JSNativeAnnot) def hasDefaultParameters = classSym.info.decls.exists(sym => sym.isClassConstructor && sym.hasDefaultParams) hasNativeCompanion && hasDefaultParameters - } // Copied from DottyBackendInterface private val desugared = new java.util.IdentityHashMap[Type, tpd.Select] - def desugarIdent(i: Ident): Option[tpd.Select] = { + def desugarIdent(i: Ident): Option[tpd.Select] = var found = desugared.get(i.tpe) - if (found == null) { - tpd.desugarIdent(i) match { + if (found == null) + tpd.desugarIdent(i) match case sel: tpd.Select => desugared.put(i.tpe, sel) found = sel case _ => - } - } if (found == null) None else Some(found) - } -} -object JSCodeGen { +object JSCodeGen: private val NullPointerExceptionClass = ClassName("java.lang.NullPointerException") private val JSObjectClassName = ClassName("scala.scalajs.js.Object") @@ -4845,35 +4451,32 @@ object JSCodeGen { sealed abstract class MaybeGlobalScope - object MaybeGlobalScope { + object MaybeGlobalScope: final case class NotGlobalScope(tree: js.Tree) 
extends MaybeGlobalScope final case class GlobalScope(pos: SourcePosition) extends MaybeGlobalScope - } /** Marker object for undefined parameters in JavaScript semantic calls. * * To be used inside a `js.Transient` node. */ - case object UndefinedParam extends js.Transient.Value { + case object UndefinedParam extends js.Transient.Value: val tpe: jstpe.Type = jstpe.UndefType def traverse(traverser: ir.Traversers.Traverser): Unit = () def transform(transformer: ir.Transformers.Transformer, isStat: Boolean)( - implicit pos: ir.Position): js.Tree = { + implicit pos: ir.Position): js.Tree = js.Transient(this) - } def printIR(out: ir.Printers.IRTreePrinter): Unit = out.print("") - } /** Info about a default param accessor. * * The method must have a default getter name for this class to make sense. */ - private class DefaultParamInfo(sym: Symbol)(using Context) { + private class DefaultParamInfo(sym: Symbol)(using Context): private val methodName = sym.name.exclude(DefaultGetterName) def isForConstructor: Boolean = methodName == nme.CONSTRUCTOR @@ -4886,14 +4489,11 @@ object JSCodeGen { /** When `isForConstructor` is false, returns the method attached to the * specified default accessor. */ - def attachedMethod: Symbol = { + def attachedMethod: Symbol = // If there are overloads, we need to find the one that has default params. 
val overloads = sym.owner.info.decl(methodName) if (!overloads.isOverloaded) overloads.symbol else overloads.suchThat(_.is(HasDefaultParams, butNot = Bridge)).symbol - } - } -} diff --git a/compiler/src/dotty/tools/backend/sjs/JSDefinitions.scala b/compiler/src/dotty/tools/backend/sjs/JSDefinitions.scala index 5336d60129ac..b71d90417b72 100644 --- a/compiler/src/dotty/tools/backend/sjs/JSDefinitions.scala +++ b/compiler/src/dotty/tools/backend/sjs/JSDefinitions.scala @@ -13,13 +13,12 @@ import StdNames._ import dotty.tools.dotc.config.SJSPlatform -object JSDefinitions { +object JSDefinitions: /** The Scala.js-specific definitions for the current context. */ def jsdefn(using Context): JSDefinitions = ctx.platform.asInstanceOf[SJSPlatform].jsDefinitions -} -final class JSDefinitions()(using Context) { +final class JSDefinitions()(using Context): @threadUnsafe lazy val InlineAnnotType: TypeRef = requiredClassRef("scala.inline") def InlineAnnot(using Context) = InlineAnnotType.symbol.asClass @@ -250,20 +249,18 @@ final class JSDefinitions()(using Context) { def Selectable_reflectiveSelectableFromLangReflectiveCalls(using Context) = Selectable_reflectiveSelectableFromLangReflectiveCallsR.symbol private var allRefClassesCache: Set[Symbol] = _ - def allRefClasses(using Context): Set[Symbol] = { - if (allRefClassesCache == null) { + def allRefClasses(using Context): Set[Symbol] = + if (allRefClassesCache == null) val baseNames = List("Object", "Boolean", "Character", "Byte", "Short", "Int", "Long", "Float", "Double") val fullNames = baseNames.flatMap { base => List(s"scala.runtime.${base}Ref", s"scala.runtime.Volatile${base}Ref") } allRefClassesCache = fullNames.map(name => requiredClass(name)).toSet - } allRefClassesCache - } /** Definitions related to scala.Enumeration. 
*/ - object scalaEnumeration { + object scalaEnumeration: val nmeValue = termName("Value") val nmeVal = termName("Val") val hasNext = termName("hasNext") @@ -299,10 +296,9 @@ final class JSDefinitions()(using Context) { def isValCtorName(sym: Symbol)(using Context): Boolean = isValCtor(sym) && (sym == Enumeration_Val_StringArg || sym == Enumeration_Val_IntStringArg) - } /** Definitions related to the treatment of JUnit bootstrappers. */ - object junit { + object junit: @threadUnsafe lazy val TestAnnotType: TypeRef = requiredClassRef("org.junit.Test") def TestAnnotClass(using Context): ClassSymbol = TestAnnotType.symbol.asClass @@ -335,6 +331,4 @@ final class JSDefinitions()(using Context) { @threadUnsafe private lazy val SuccessModule_applyR = requiredModule("scala.util.Success").requiredMethodRef(nme.apply) def SuccessModule_apply(using Context): Symbol = SuccessModule_applyR.symbol - } -} diff --git a/compiler/src/dotty/tools/backend/sjs/JSEncoding.scala b/compiler/src/dotty/tools/backend/sjs/JSEncoding.scala index 73a150c60290..22c58b2e1797 100644 --- a/compiler/src/dotty/tools/backend/sjs/JSEncoding.scala +++ b/compiler/src/dotty/tools/backend/sjs/JSEncoding.scala @@ -35,7 +35,7 @@ import JSDefinitions.jsdefn * * @author Sébastien Doeraene */ -object JSEncoding { +object JSEncoding: /** Name of the capture param storing the JS super class. 
* @@ -60,7 +60,7 @@ object JSEncoding { // Fresh local name generator ---------------------------------------------- - class LocalNameGenerator { + class LocalNameGenerator: import LocalNameGenerator._ private val usedLocalNames = mutable.Set.empty[LocalName] @@ -69,24 +69,21 @@ object JSEncoding { private val labelSymbolNames = mutable.Map.empty[Symbol, LabelName] private var returnLabelName: Option[LabelName] = None - def reserveLocalName(name: LocalName): Unit = { + def reserveLocalName(name: LocalName): Unit = require(usedLocalNames.isEmpty, s"Trying to reserve the name '$name' but names have already been allocated") usedLocalNames += name - } private def freshNameGeneric[N <: ir.Names.Name](base: N, usedNamesSet: mutable.Set[N])( - withSuffix: (N, String) => N): N = { + withSuffix: (N, String) => N): N = var suffix = 1 var result = base - while (usedNamesSet(result)) { + while (usedNamesSet(result)) suffix += 1 result = withSuffix(base, "$" + suffix) - } usedNamesSet += result result - } def freshName(base: LocalName): LocalName = freshNameGeneric(base, usedLocalNames)(_.withSuffix(_)) @@ -106,7 +103,7 @@ object JSEncoding { def freshLocalIdent(base: TermName)(implicit pos: ir.Position): js.LocalIdent = freshLocalIdent(base.mangledString) - def localSymbolName(sym: Symbol)(using Context): LocalName = { + def localSymbolName(sym: Symbol)(using Context): LocalName = localSymbolNames.getOrElseUpdate(sym, { /* The emitter does not like local variables that start with a '$', * because it needs to encode them not to clash with emitter-generated @@ -120,14 +117,12 @@ object JSEncoding { * case. It can still happen for user-defined variables, but in that case * the emitter will deal with it. 
*/ - val base = sym.name match { + val base = sym.name match case nme.SELF => "this$" // instead of $this case nme.OUTER => "outer" // instead of $outer case name => name.mangledString - } freshName(base) }) - } def freshLabelName(base: LabelName): LabelName = freshNameGeneric(base, usedLabelNames)(_.withSuffix(_)) @@ -141,36 +136,30 @@ object JSEncoding { def labelSymbolName(sym: Symbol)(using Context): LabelName = labelSymbolNames.getOrElseUpdate(sym, freshLabelName(sym.javaSimpleName)) - def getEnclosingReturnLabel()(implicit pos: ir.Position): js.LabelIdent = { + def getEnclosingReturnLabel()(implicit pos: ir.Position): js.LabelIdent = if (returnLabelName.isEmpty) returnLabelName = Some(freshLabelName("_return")) js.LabelIdent(returnLabelName.get) - } /* If this `LocalNameGenerator` has a `returnLabelName` (often added in the * construction of the `body` argument), wrap the resulting js.Tree to use that label. */ - def makeLabeledIfRequiresEnclosingReturn(tpe: jstpe.Type)(body: js.Tree)(implicit pos: ir.Position): js.Tree = { - returnLabelName match { + def makeLabeledIfRequiresEnclosingReturn(tpe: jstpe.Type)(body: js.Tree)(implicit pos: ir.Position): js.Tree = + returnLabelName match case None => body case Some(labelName) => js.Labeled(js.LabelIdent(labelName), tpe, body) - } - } - } - private object LocalNameGenerator { + private object LocalNameGenerator: private val xLocalName = LocalName("x") - } // Encoding methods ---------------------------------------------------------- def encodeLabelSym(sym: Symbol)( - implicit ctx: Context, pos: ir.Position, localNames: LocalNameGenerator): js.LabelIdent = { + implicit ctx: Context, pos: ir.Position, localNames: LocalNameGenerator): js.LabelIdent = require(sym.is(Flags.Label), "encodeLabelSym called with non-label symbol: " + sym) js.LabelIdent(localNames.labelSymbolName(sym)) - } def encodeFieldSym(sym: Symbol)(implicit ctx: Context, pos: ir.Position): js.FieldIdent = 
js.FieldIdent(FieldName(encodeFieldSymAsString(sym))) @@ -178,17 +167,16 @@ object JSEncoding { def encodeFieldSymAsStringLiteral(sym: Symbol)(implicit ctx: Context, pos: ir.Position): js.StringLiteral = js.StringLiteral(encodeFieldSymAsString(sym)) - private def encodeFieldSymAsString(sym: Symbol)(using Context): String = { + private def encodeFieldSymAsString(sym: Symbol)(using Context): String = require(sym.owner.isClass && sym.isTerm && !sym.isOneOf(MethodOrModule), "encodeFieldSym called with non-field symbol: " + sym) val name0 = sym.javaSimpleName if (name0.charAt(name0.length() - 1) != ' ') name0 else name0.substring(0, name0.length() - 1) - } def encodeMethodSym(sym: Symbol, reflProxy: Boolean = false)( - implicit ctx: Context, pos: ir.Position): js.MethodIdent = { + implicit ctx: Context, pos: ir.Position): js.MethodIdent = require(sym.is(Flags.Method), "encodeMethodSym called with non-method symbol: " + sym) val tpe = sym.info @@ -203,67 +191,58 @@ object JSEncoding { val name = sym.name val simpleName = SimpleMethodName(name.mangledString) - val methodName = { + val methodName = if (sym.isClassConstructor) MethodName.constructor(paramTypeRefs) else if (reflProxy) MethodName.reflectiveProxy(simpleName, paramTypeRefs) else MethodName(simpleName, paramTypeRefs, paramOrResultTypeRef(patchedResultType(sym))) - } js.MethodIdent(methodName) - } - def encodeJSNativeMemberSym(sym: Symbol)(using Context, ir.Position): js.MethodIdent = { + def encodeJSNativeMemberSym(sym: Symbol)(using Context, ir.Position): js.MethodIdent = require(sym.hasAnnotation(jsdefn.JSNativeAnnot), "encodeJSNativeMemberSym called with non-native symbol: " + sym) if (sym.is(Method)) encodeMethodSym(sym) else encodeFieldSymAsMethod(sym) - } - def encodeStaticMemberSym(sym: Symbol)(using Context, ir.Position): js.MethodIdent = { + def encodeStaticMemberSym(sym: Symbol)(using Context, ir.Position): js.MethodIdent = require(sym.is(Flags.JavaStaticTerm), "encodeStaticMemberSym called with 
non-static symbol: " + sym) encodeFieldSymAsMethod(sym) - } - private def encodeFieldSymAsMethod(sym: Symbol)(using Context, ir.Position): js.MethodIdent = { + private def encodeFieldSymAsMethod(sym: Symbol)(using Context, ir.Position): js.MethodIdent = val name = sym.name val resultTypeRef = paramOrResultTypeRef(sym.info) val methodName = MethodName(name.mangledString, Nil, resultTypeRef) js.MethodIdent(methodName) - } - def encodeDynamicImportForwarderIdent(params: List[Symbol])(using Context, ir.Position): js.MethodIdent = { + def encodeDynamicImportForwarderIdent(params: List[Symbol])(using Context, ir.Position): js.MethodIdent = val paramTypeRefs = params.map(sym => paramOrResultTypeRef(sym.info)) val resultTypeRef = jstpe.ClassRef(ir.Names.ObjectClass) val methodName = MethodName(dynamicImportForwarderSimpleName, paramTypeRefs, resultTypeRef) js.MethodIdent(methodName) - } /** Computes the type ref for a type, to be used in a method signature. */ private def paramOrResultTypeRef(tpe: Type)(using Context): jstpe.TypeRef = toParamOrResultTypeRef(toTypeRef(tpe)) def encodeLocalSym(sym: Symbol)( - implicit ctx: Context, pos: ir.Position, localNames: LocalNameGenerator): js.LocalIdent = { + implicit ctx: Context, pos: ir.Position, localNames: LocalNameGenerator): js.LocalIdent = require(!sym.owner.isClass && sym.isTerm && !sym.is(Flags.Method) && !sym.is(Flags.Module), "encodeLocalSym called with non-local symbol: " + sym) js.LocalIdent(localNames.localSymbolName(sym)) - } - def encodeClassType(sym: Symbol)(using Context): jstpe.Type = { + def encodeClassType(sym: Symbol)(using Context): jstpe.Type = if (sym == defn.ObjectClass) jstpe.AnyType else if (sym.isJSType) jstpe.AnyType - else { + else assert(sym != defn.ArrayClass, "encodeClassType() cannot be called with ArrayClass") jstpe.ClassType(encodeClassName(sym)) - } - } def encodeClassRef(sym: Symbol)(using Context): jstpe.ClassRef = jstpe.ClassRef(encodeClassName(sym)) @@ -272,7 +251,7 @@ object JSEncoding { 
implicit ctx: Context, pos: ir.Position): js.ClassIdent = js.ClassIdent(encodeClassName(sym)) - def encodeClassName(sym: Symbol)(using Context): ClassName = { + def encodeClassName(sym: Symbol)(using Context): ClassName = val sym1 = if (sym.isAllOf(ModuleClass | JavaDefined)) sym.linkedClass else sym @@ -291,27 +270,23 @@ object JSEncoding { ScalaRuntimeNullClassName else ClassName(sym1.javaClassName) - } /** Converts a general TypeRef to a TypeRef to be used in a method signature. */ - def toParamOrResultTypeRef(typeRef: jstpe.TypeRef): jstpe.TypeRef = { - typeRef match { + def toParamOrResultTypeRef(typeRef: jstpe.TypeRef): jstpe.TypeRef = + typeRef match case jstpe.ClassRef(ScalaRuntimeNullClassName) => jstpe.NullRef case jstpe.ClassRef(ScalaRuntimeNothingClassName) => jstpe.NothingRef case _ => typeRef - } - } - def toIRTypeAndTypeRef(tp: Type)(using Context): (jstpe.Type, jstpe.TypeRef) = { + def toIRTypeAndTypeRef(tp: Type)(using Context): (jstpe.Type, jstpe.TypeRef) = val typeRefInternal = toTypeRefInternal(tp) (toIRTypeInternal(typeRefInternal), typeRefInternal._1) - } def toIRType(tp: Type)(using Context): jstpe.Type = toIRTypeInternal(toTypeRefInternal(tp)) - private def toIRTypeInternal(typeRefInternal: (jstpe.TypeRef, Symbol))(using Context): jstpe.Type = { - typeRefInternal._1 match { + private def toIRTypeInternal(typeRefInternal: (jstpe.TypeRef, Symbol))(using Context): jstpe.Type = + typeRefInternal._1 match case jstpe.PrimRef(irTpe) => irTpe @@ -328,17 +303,15 @@ object JSEncoding { case typeRef: jstpe.ArrayTypeRef => jstpe.ArrayType(typeRef) - } - } def toTypeRef(tp: Type)(using Context): jstpe.TypeRef = toTypeRefInternal(tp)._1 - private def toTypeRefInternal(tp: Type)(using Context): (jstpe.TypeRef, Symbol) = { - def primitiveOrClassToTypeRef(sym: Symbol): (jstpe.TypeRef, Symbol) = { + private def toTypeRefInternal(tp: Type)(using Context): (jstpe.TypeRef, Symbol) = + def primitiveOrClassToTypeRef(sym: Symbol): (jstpe.TypeRef, Symbol) = 
assert(sym.isClass, sym) //assert(sym != defn.ArrayClass || isCompilingArray, sym) - val typeRef = if (sym.isPrimitiveValueClass) { + val typeRef = if (sym.isPrimitiveValueClass) if (sym == defn.UnitClass) jstpe.VoidRef else if (sym == defn.BooleanClass) jstpe.BooleanRef else if (sym == defn.CharClass) jstpe.CharRef @@ -349,22 +322,19 @@ object JSEncoding { else if (sym == defn.FloatClass) jstpe.FloatRef else if (sym == defn.DoubleClass) jstpe.DoubleRef else throw new Exception(s"unknown primitive value class $sym") - } else { + else encodeClassRef(sym) - } (typeRef, sym) - } /** * When compiling Array.scala, the type parameter T is not erased and shows up in method * signatures, e.g. `def apply(i: Int): T`. A TyperRef to T is replaced by ObjectReference. */ - def nonClassTypeRefToTypeRef(sym: Symbol): (jstpe.TypeRef, Symbol) = { + def nonClassTypeRefToTypeRef(sym: Symbol): (jstpe.TypeRef, Symbol) = //assert(sym.isType && isCompilingArray, sym) (jstpe.ClassRef(ir.Names.ObjectClass), defn.ObjectClass) - } - tp.widenDealias match { + tp.widenDealias match // Array type such as Array[Int] (kept by erasure) case JavaArrayType(el) => val elTypeRef = toTypeRefInternal(el) @@ -387,8 +357,6 @@ object JSEncoding { case a @ AnnotatedType(t, _) => //debuglog(s"typeKind of annotated type $a") toTypeRefInternal(t) - } - } /** Patches the result type of a method symbol to sanitize it. 
* @@ -404,12 +372,11 @@ object JSEncoding { else sym.info.resultType def originalNameOfLocal(sym: Symbol)( - implicit ctx: Context, localNames: LocalNameGenerator): OriginalName = { + implicit ctx: Context, localNames: LocalNameGenerator): OriginalName = val irName = localNames.localSymbolName(sym) val originalName = UTF8String(sym.name.unexpandedName.toString) if (UTF8String.equals(originalName, irName.encoded)) NoOriginalName else OriginalName(originalName) - } def originalNameOfField(sym: Symbol)(using Context): OriginalName = originalNameOf(sym.name) @@ -420,9 +387,7 @@ object JSEncoding { def originalNameOfClass(sym: Symbol)(using Context): OriginalName = originalNameOf(sym.fullName) - private def originalNameOf(name: Name): OriginalName = { + private def originalNameOf(name: Name): OriginalName = val originalName = name.unexpandedName.toString if (originalName == name.mangledString) NoOriginalName else OriginalName(originalName) - } -} diff --git a/compiler/src/dotty/tools/backend/sjs/JSExportsGen.scala b/compiler/src/dotty/tools/backend/sjs/JSExportsGen.scala index 78412999bb34..ca5fcf7c77b5 100644 --- a/compiler/src/dotty/tools/backend/sjs/JSExportsGen.scala +++ b/compiler/src/dotty/tools/backend/sjs/JSExportsGen.scala @@ -33,21 +33,20 @@ import dotty.tools.dotc.transform.sjs.JSSymUtils._ import JSEncoding._ -final class JSExportsGen(jsCodeGen: JSCodeGen)(using Context) { +final class JSExportsGen(jsCodeGen: JSCodeGen)(using Context): import jsCodeGen._ import positionConversions._ /** Info for a non-member export. 
*/ - sealed trait ExportInfo { + sealed trait ExportInfo: val pos: SourcePosition - } final case class TopLevelExportInfo(moduleID: String, jsName: String)(val pos: SourcePosition) extends ExportInfo final case class StaticExportInfo(jsName: String)(val pos: SourcePosition) extends ExportInfo private sealed trait ExportKind - private object ExportKind { + private object ExportKind: case object Module extends ExportKind case object JSClass extends ExportKind case object Constructor extends ExportKind @@ -55,100 +54,85 @@ final class JSExportsGen(jsCodeGen: JSCodeGen)(using Context) { case object Property extends ExportKind case object Field extends ExportKind - def apply(sym: Symbol): ExportKind = { + def apply(sym: Symbol): ExportKind = if (sym.is(Flags.Module) && sym.isStatic) Module else if (sym.isClass) JSClass else if (sym.isConstructor) Constructor else if (!sym.is(Flags.Method)) Field else if (sym.isJSProperty) Property else Method - } - } - private def topLevelExportsOf(sym: Symbol): List[TopLevelExportInfo] = { + private def topLevelExportsOf(sym: Symbol): List[TopLevelExportInfo] = def isScalaClass(sym: Symbol): Boolean = sym.isClass && !sym.isOneOf(Module | Trait) && !sym.isJSType - if (isScalaClass(sym)) { + if (isScalaClass(sym)) // Scala classes are never exported; their constructors are Nil - } else if (sym.is(Accessor) || sym.is(Module, butNot = ModuleClass)) { + else if (sym.is(Accessor) || sym.is(Module, butNot = ModuleClass)) /* - Accessors receive the `@JSExportTopLevel` annotation of their associated field, * but only the field is really exported. * - Module values are not exported; their module class takes care of the export. 
*/ Nil - } else { + else val symForAnnot = if (sym.isConstructor && isScalaClass(sym.owner)) sym.owner else sym - symForAnnot.annotations.collect { + symForAnnot.annotations.collect: case annot if annot.symbol == jsdefn.JSExportTopLevelAnnot => val jsName = annot.argumentConstantString(0).get val moduleID = annot.argumentConstantString(1).getOrElse(DefaultModuleID) TopLevelExportInfo(moduleID, jsName)(annot.tree.sourcePos) - } - } - } - private def staticExportsOf(sym: Symbol): List[StaticExportInfo] = { - if (sym.is(Accessor)) { + private def staticExportsOf(sym: Symbol): List[StaticExportInfo] = + if (sym.is(Accessor)) Nil - } else { - sym.annotations.collect { + else + sym.annotations.collect: case annot if annot.symbol == jsdefn.JSExportStaticAnnot => - val jsName = annot.argumentConstantString(0).getOrElse { + val jsName = annot.argumentConstantString(0).getOrElse: sym.defaultJSName - } StaticExportInfo(jsName)(annot.tree.sourcePos) - } - } - } - private def checkSameKind(tups: List[(ExportInfo, Symbol)]): Option[ExportKind] = { + private def checkSameKind(tups: List[(ExportInfo, Symbol)]): Option[ExportKind] = assert(tups.nonEmpty, "must have at least one export") val firstSym = tups.head._2 val overallKind = ExportKind(firstSym) var bad = false - for ((info, sym) <- tups.tail) { + for ((info, sym) <- tups.tail) val kind = ExportKind(sym) - if (kind != overallKind) { + if (kind != overallKind) bad = true report.error( em"export overload conflicts with export of $firstSym: they are of different types (${kind.tryToShow} / ${overallKind.tryToShow})", info.pos) - } - } if (bad) None else Some(overallKind) - } - private def checkSingleField(tups: List[(ExportInfo, Symbol)]): Symbol = { + private def checkSingleField(tups: List[(ExportInfo, Symbol)]): Symbol = assert(tups.nonEmpty, "must have at least one export") val firstSym = tups.head._2 - for ((info, _) <- tups.tail) { + for ((info, _) <- tups.tail) report.error( em"export overload conflicts with export of 
$firstSym: a field may not share its exported name with another export", info.pos) - } firstSym - } - def genTopLevelExports(classSym: ClassSymbol): List[js.TopLevelExportDef] = { + def genTopLevelExports(classSym: ClassSymbol): List[js.TopLevelExportDef] = val exports = for { sym <- classSym :: classSym.info.decls.toList info <- topLevelExportsOf(sym) - } yield { + } yield (info, sym) - } (for { (info, tups) <- exports.groupBy(_._1) @@ -158,7 +142,7 @@ final class JSExportsGen(jsCodeGen: JSCodeGen)(using Context) { implicit val pos = info.pos - kind match { + kind match case Module => js.TopLevelModuleExportDef(info.moduleID, info.jsName) @@ -169,9 +153,8 @@ final class JSExportsGen(jsCodeGen: JSCodeGen)(using Context) { case Constructor | Method => val exported = tups.map(_._2) - val methodDef = withNewLocalNameScope { + val methodDef = withNewLocalNameScope: genExportMethod(exported, JSName.Literal(info.jsName), static = true) - } js.TopLevelMethodExportDef(info.moduleID, methodDef) @@ -181,17 +164,14 @@ final class JSExportsGen(jsCodeGen: JSCodeGen)(using Context) { case Field => val sym = checkSingleField(tups) js.TopLevelFieldExportDef(info.moduleID, info.jsName, encodeFieldSym(sym)) - } }).toList - } - def genStaticExports(classSym: Symbol): List[js.MemberDef] = { + def genStaticExports(classSym: Symbol): List[js.MemberDef] = val exports = for { sym <- classSym.info.decls.toList info <- staticExportsOf(sym) - } yield { + } yield (info, sym) - } (for { (info, tups) <- exports.groupBy(_._1) @@ -203,7 +183,7 @@ final class JSExportsGen(jsCodeGen: JSCodeGen)(using Context) { import ExportKind._ - kind match { + kind match case Method => genMemberExportOrDispatcher(JSName.Literal(info.jsName), isProp = false, alts, static = true) @@ -223,35 +203,31 @@ final class JSExportsGen(jsCodeGen: JSCodeGen)(using Context) { case kind => throw new AssertionError(s"unexpected static export kind: $kind") - } }).toList - } /** Generates exported methods and properties for a 
class. * * @param classSym symbol of the class we export for */ - def genMemberExports(classSym: ClassSymbol): List[js.MemberDef] = { + def genMemberExports(classSym: ClassSymbol): List[js.MemberDef] = val classInfo = classSym.info val allExports = classInfo.memberDenots(takeAllFilter, { (name, buf) => if (isExportName(name)) buf ++= classInfo.member(name).alternatives }) - val newlyDeclaredExports = if (classSym.superClass == NoSymbol) { + val newlyDeclaredExports = if (classSym.superClass == NoSymbol) allExports - } else { + else allExports.filterNot { denot => classSym.superClass.info.member(denot.name).hasAltWith(_.info =:= denot.info) } - } val newlyDeclaredExportNames = newlyDeclaredExports.map(_.name.toTermName).toList.distinct newlyDeclaredExportNames.map(genMemberExport(classSym, _)) - } - private def genMemberExport(classSym: ClassSymbol, name: TermName): js.MemberDef = { + private def genMemberExport(classSym: ClassSymbol, name: TermName): js.MemberDef = /* This used to be `.member(name)`, but it caused #3538, since we were * sometimes selecting mixin forwarders, whose type history does not go * far enough back in time to see varargs. 
We now explicitly exclude @@ -271,24 +247,20 @@ final class JSExportsGen(jsCodeGen: JSCodeGen)(using Context) { // Check if we have a conflicting export of the other kind val conflicting = classSym.info.member(makeExportName(jsName, !isProp)) - if (conflicting.exists) { + if (conflicting.exists) val kind = if (isProp) "property" else "method" val conflictingMember = conflicting.alternatives.head.symbol.fullName - val errorPos: SrcPos = alts.map(_.symbol).filter(_.owner == classSym) match { + val errorPos: SrcPos = alts.map(_.symbol).filter(_.owner == classSym) match case Nil => classSym case altsInClass => altsInClass.minBy(_.span.point) - } report.error(em"Exported $kind $jsName conflicts with $conflictingMember", errorPos) - } genMemberExportOrDispatcher(JSName.Literal(jsName), isProp, alts.map(_.symbol), static = false) - } - def genJSClassDispatchers(classSym: Symbol, dispatchMethodsNames: List[JSName]): List[js.MemberDef] = { + def genJSClassDispatchers(classSym: Symbol, dispatchMethodsNames: List[JSName]): List[js.MemberDef] = dispatchMethodsNames.map(genJSClassDispatcher(classSym, _)) - } - private def genJSClassDispatcher(classSym: Symbol, name: JSName): js.MemberDef = { + private def genJSClassDispatcher(classSym: Symbol, name: JSName): js.MemberDef = val alts = classSym.info.membersBasedOnFlags(required = Method, excluded = Bridge) .map(_.symbol) .filter { sym => @@ -305,29 +277,25 @@ final class JSExportsGen(jsCodeGen: JSCodeGen)(using Context) { val (propSyms, methodSyms) = alts.partition(_.isJSProperty) val isProp = propSyms.nonEmpty - if (isProp && methodSyms.nonEmpty) { + if (isProp && methodSyms.nonEmpty) val firstAlt = alts.head report.error( em"Conflicting properties and methods for ${classSym.fullName}::$name.", firstAlt.srcPos) implicit val pos = firstAlt.span js.JSPropertyDef(js.MemberFlags.empty, genExpr(name)(firstAlt.sourcePos), None, None) - } else { + else genMemberExportOrDispatcher(name, isProp, alts, static = false) - } - } private def 
genMemberExportOrDispatcher(jsName: JSName, isProp: Boolean, - alts: List[Symbol], static: Boolean): js.MemberDef = { - withNewLocalNameScope { + alts: List[Symbol], static: Boolean): js.MemberDef = + withNewLocalNameScope: if (isProp) genExportProperty(alts, jsName, static) else genExportMethod(alts, jsName, static) - } - } - private def genExportProperty(alts: List[Symbol], jsName: JSName, static: Boolean): js.JSPropertyDef = { + private def genExportProperty(alts: List[Symbol], jsName: JSName, static: Boolean): js.JSPropertyDef = assert(!alts.isEmpty, s"genExportProperty with empty alternatives for $jsName") implicit val pos: Position = alts.head.span @@ -350,22 +318,19 @@ final class JSExportsGen(jsCodeGen: JSCodeGen)(using Context) { genApplyForSingleExported(new FormalArgsRegistry(0, false), new ExportedSymbol(getterSym, static), static) } - val setterArgAndBody = { - if (setters.isEmpty) { + val setterArgAndBody = + if (setters.isEmpty) None - } else { + else val formalArgsRegistry = new FormalArgsRegistry(1, false) val (List(arg), None) = formalArgsRegistry.genFormalArgs(): @unchecked val body = genOverloadDispatchSameArgc(jsName, formalArgsRegistry, setters.map(new ExportedSymbol(_, static)), jstpe.AnyType, None) Some((arg, body)) - } - } js.JSPropertyDef(flags, genExpr(jsName)(alts.head.sourcePos), getterBody, setterArgAndBody) - } - private def genExportMethod(alts0: List[Symbol], jsName: JSName, static: Boolean)(using Context): js.JSMethodDef = { + private def genExportMethod(alts0: List[Symbol], jsName: JSName, static: Boolean)(using Context): js.JSMethodDef = assert(alts0.nonEmpty, "need at least one alternative to generate exporter method") implicit val pos: SourcePosition = alts0.head.sourcePos @@ -376,12 +341,11 @@ final class JSExportsGen(jsCodeGen: JSCodeGen)(using Context) { val flags = js.MemberFlags.empty.withNamespace(namespace) // toString() is always exported. We might need to add it here to get correct overloading. 
- val alts = jsName match { + val alts = jsName match case JSName.Literal("toString") if alts0.forall(_.info.paramInfoss.exists(_.nonEmpty)) => defn.Any_toString :: alts0 case _ => alts0 - } val overloads = alts.map(new ExportedSymbol(_, static)) @@ -390,10 +354,9 @@ final class JSExportsGen(jsCodeGen: JSCodeGen)(using Context) { js.JSMethodDef(flags, genExpr(jsName), formalArgs, restParam, body)( OptimizerHints.empty, None) - } def genOverloadDispatch(jsName: JSName, alts: List[Exported], tpe: jstpe.Type)( - using pos: SourcePosition): (List[js.ParamDef], Option[js.ParamDef], js.Tree) = { + using pos: SourcePosition): (List[js.ParamDef], Option[js.ParamDef], js.Tree) = // Create the formal args registry val hasVarArg = alts.exists(_.hasRepeatedParam) @@ -419,19 +382,17 @@ final class JSExportsGen(jsCodeGen: JSCodeGen)(using Context) { else genExportMethodMultiAlts(formalArgsRegistry, maxNonRepeatedArgc, alts, tpe, jsName) (formalArgs, restParam, body) - } private def genExportMethodMultiAlts(formalArgsRegistry: FormalArgsRegistry, maxNonRepeatedArgc: Int, alts: List[Exported], tpe: jstpe.Type, jsName: JSName)( - implicit pos: SourcePosition): js.Tree = { + implicit pos: SourcePosition): js.Tree = // Generate tuples (argc, method) val methodArgCounts = for { alt <- alts argc <- alt.minArgc to (if (alt.hasRepeatedParam) maxNonRepeatedArgc else alt.maxNonRepeatedArgc) - } yield { + } yield (argc, alt) - } // Create a list of (argCount -> methods), sorted by argCount (methods may appear multiple times) val methodsByArgCount: List[(Int, List[Exported])] = @@ -445,36 +406,31 @@ final class JSExportsGen(jsCodeGen: JSCodeGen)(using Context) { val cases = for { (argc, methods) <- methodsByArgCount if methods != altsWithVarArgs // exclude default case we're generating anyways for varargs - } yield { + } yield // body of case to disambiguates methods with current count val caseBody = genOverloadDispatchSameArgc(jsName, formalArgsRegistry, methods, tpe, Some(argc)) 
List(js.IntLiteral(argc - formalArgsRegistry.minArgc)) -> caseBody - } - def defaultCase = { + def defaultCase = if (altsWithVarArgs.isEmpty) genThrowTypeError() else genOverloadDispatchSameArgc(jsName, formalArgsRegistry, altsWithVarArgs, tpe, None) - } - val body = { - if (cases.isEmpty) { + val body = + if (cases.isEmpty) defaultCase - } else if (cases.tail.isEmpty && altsWithVarArgs.isEmpty) { + else if (cases.tail.isEmpty && altsWithVarArgs.isEmpty) cases.head._2 - } else { + else val restArgRef = formalArgsRegistry.genRestArgRef() js.Match( js.AsInstanceOf(js.JSSelect(restArgRef, js.StringLiteral("length")), jstpe.IntType), cases, defaultCase)( tpe) - } - } body - } /** Resolves method calls to [[alts]] while assuming they have the same parameter count. * @@ -490,9 +446,8 @@ final class JSExportsGen(jsCodeGen: JSCodeGen)(using Context) { * Maximum number of arguments to use for disambiguation */ private def genOverloadDispatchSameArgc(jsName: JSName, formalArgsRegistry: FormalArgsRegistry, - alts: List[Exported], tpe: jstpe.Type, maxArgc: Option[Int]): js.Tree = { + alts: List[Exported], tpe: jstpe.Type, maxArgc: Option[Int]): js.Tree = genOverloadDispatchSameArgcRec(jsName, formalArgsRegistry, alts, tpe, paramIndex = 0, maxArgc) - } /** Resolves method calls to [[alts]] while assuming they have the same parameter count. 
* @@ -510,13 +465,13 @@ final class JSExportsGen(jsCodeGen: JSCodeGen)(using Context) { * Maximum number of arguments to use for disambiguation */ private def genOverloadDispatchSameArgcRec(jsName: JSName, formalArgsRegistry: FormalArgsRegistry, - alts: List[Exported], tpe: jstpe.Type, paramIndex: Int, maxArgc: Option[Int]): js.Tree = { + alts: List[Exported], tpe: jstpe.Type, paramIndex: Int, maxArgc: Option[Int]): js.Tree = implicit val pos = alts.head.pos - if (alts.sizeIs == 1) { + if (alts.sizeIs == 1) alts.head.genBody(formalArgsRegistry) - } else if (maxArgc.exists(_ <= paramIndex) || !alts.exists(_.params.size > paramIndex)) { + else if (maxArgc.exists(_ <= paramIndex) || !alts.exists(_.params.size > paramIndex)) // We reach here in three cases: // 1. The parameter list has been exhausted // 2. The optional argument count restriction has triggered @@ -524,18 +479,18 @@ final class JSExportsGen(jsCodeGen: JSCodeGen)(using Context) { // Therefore, we should fail reportCannotDisambiguateError(jsName, alts.map(_.sym)) js.Undefined() - } else { + else val altsByTypeTest = groupByWithoutHashCode(alts) { exported => typeTestForTpe(exported.exportArgTypeAt(paramIndex)) } - if (altsByTypeTest.size == 1) { + if (altsByTypeTest.size == 1) // Testing this parameter is not doing any us good genOverloadDispatchSameArgcRec(jsName, formalArgsRegistry, alts, tpe, paramIndex + 1, maxArgc) - } else { + else // Sort them so that, e.g., isInstanceOf[String] comes before isInstanceOf[Object] val sortedAltsByTypeTest = topoSortDistinctsWith(altsByTypeTest) { (lhs, rhs) => - (lhs._1, rhs._1) match { + (lhs._1, rhs._1) match // NoTypeTest is always last case (_, NoTypeTest) => true case (NoTypeTest, _) => false @@ -548,7 +503,6 @@ final class JSExportsGen(jsCodeGen: JSCodeGen)(using Context) { case (_: PrimitiveTypeTest, _: InstanceOfTypeTest) => true case (_: InstanceOfTypeTest, _: PrimitiveTypeTest) => false - } } val defaultCase = genThrowTypeError() @@ -563,28 +517,23 @@ final 
class JSExportsGen(jsCodeGen: JSCodeGen)(using Context) { def hasDefaultParam = subAlts.exists(_.hasDefaultAt(paramIndex)) - val optCond = typeTest match { + val optCond = typeTest match case PrimitiveTypeTest(tpe, _) => Some(js.IsInstanceOf(paramRef, tpe)) case InstanceOfTypeTest(tpe) => Some(genIsInstanceOf(paramRef, tpe)) case NoTypeTest => None - } optCond.fold[js.Tree] { genSubAlts // note: elsep is discarded, obviously } { cond => - val condOrUndef = if (!hasDefaultParam) cond else { + val condOrUndef = if (!hasDefaultParam) cond else js.If(cond, js.BooleanLiteral(true), js.BinaryOp(js.BinaryOp.===, paramRef, js.Undefined()))( jstpe.BooleanType) - } js.If(condOrUndef, genSubAlts, elsep)(tpe) } } - } - } - } - private def reportCannotDisambiguateError(jsName: JSName, alts: List[Symbol]): Unit = { + private def reportCannotDisambiguateError(jsName: JSName, alts: List[Symbol]): Unit = val currentClass = currentClassSym.get /* Find a position that is in the current class for decent error reporting. @@ -592,9 +541,8 @@ final class JSExportsGen(jsCodeGen: JSCodeGen)(using Context) { * one coming last in the source text) so that we reliably display the * same error in all compilers. */ - val validPositions = alts.collect { + val validPositions = alts.collect: case alt if alt.owner == currentClass => alt.sourcePos - } val pos: SourcePosition = if (validPositions.isEmpty) currentClass.sourcePos else validPositions.maxBy(_.point) @@ -614,7 +562,6 @@ final class JSExportsGen(jsCodeGen: JSCodeGen)(using Context) { report.error( em"Cannot disambiguate overloads for $fullKind $displayName with types\n $altsTypesInfo", pos) - } /** Generates a call to the method represented by the given `exported` while using the formalArguments * and potentially the argument array. @@ -622,17 +569,15 @@ final class JSExportsGen(jsCodeGen: JSCodeGen)(using Context) { * Also inserts default parameters if required. 
*/ private def genApplyForSingleExported(formalArgsRegistry: FormalArgsRegistry, - exported: Exported, static: Boolean): js.Tree = { - if (currentClassSym.isJSType && exported.sym.owner != currentClassSym.get) { + exported: Exported, static: Boolean): js.Tree = + if (currentClassSym.isJSType && exported.sym.owner != currentClassSym.get) assert(!static, s"nonsensical JS super call in static export of ${exported.sym}") genApplyForSingleExportedJSSuperCall(formalArgsRegistry, exported) - } else { + else genApplyForSingleExportedNonJSSuperCall(formalArgsRegistry, exported, static) - } - } private def genApplyForSingleExportedJSSuperCall( - formalArgsRegistry: FormalArgsRegistry, exported: Exported): js.Tree = { + formalArgsRegistry: FormalArgsRegistry, exported: Exported): js.Tree = implicit val pos = exported.pos val sym = exported.sym @@ -641,51 +586,46 @@ final class JSExportsGen(jsCodeGen: JSCodeGen)(using Context) { val allArgs = formalArgsRegistry.genAllArgsRefsForForwarder() - val superClass = { + val superClass = val superClassSym = currentClassSym.asClass.superClass if (superClassSym.isNestedJSClass) js.VarRef(js.LocalIdent(JSSuperClassParamName))(jstpe.AnyType) else js.LoadJSConstructor(encodeClassName(superClassSym)) - } val receiver = js.This()(currentThisType) val nameTree = genExpr(sym.jsName) - if (sym.isJSGetter) { + if (sym.isJSGetter) assert(allArgs.isEmpty, s"getter symbol $sym does not have a getter signature") js.JSSuperSelect(superClass, receiver, nameTree) - } else if (sym.isJSSetter) { + else if (sym.isJSSetter) assert(allArgs.size == 1 && allArgs.head.isInstanceOf[js.Tree], s"setter symbol $sym does not have a setter signature") js.Assign(js.JSSuperSelect(superClass, receiver, nameTree), allArgs.head.asInstanceOf[js.Tree]) - } else { + else js.JSSuperMethodCall(superClass, receiver, nameTree, allArgs) - } - } private def genApplyForSingleExportedNonJSSuperCall( - formalArgsRegistry: FormalArgsRegistry, exported: Exported, static: Boolean): 
js.Tree = { + formalArgsRegistry: FormalArgsRegistry, exported: Exported, static: Boolean): js.Tree = implicit val pos = exported.pos val varDefs = new mutable.ListBuffer[js.VarDef] - for ((param, i) <- exported.params.zipWithIndex) { + for ((param, i) <- exported.params.zipWithIndex) val rhs = genScalaArg(exported, i, formalArgsRegistry, param, static, captures = Nil)( prevArgsCount => varDefs.take(prevArgsCount).toList.map(_.ref)) varDefs += js.VarDef(freshLocalIdent("prep" + i), NoOriginalName, rhs.tpe, mutable = false, rhs) - } val builtVarDefs = varDefs.result() val jsResult = genResult(exported, builtVarDefs.map(_.ref), static) js.Block(builtVarDefs :+ jsResult) - } /** Generates a Scala argument from dispatched JavaScript arguments * (unboxing and default parameter handling). @@ -693,34 +633,31 @@ final class JSExportsGen(jsCodeGen: JSCodeGen)(using Context) { def genScalaArg(exported: Exported, paramIndex: Int, formalArgsRegistry: FormalArgsRegistry, param: JSParamInfo, static: Boolean, captures: List[js.Tree])( previousArgsValues: Int => List[js.Tree])( - implicit pos: SourcePosition): js.Tree = { + implicit pos: SourcePosition): js.Tree = - if (param.repeated) { + if (param.repeated) genJSArrayToVarArgs(formalArgsRegistry.genVarargRef(paramIndex)) - } else { + else val jsArg = formalArgsRegistry.genArgRef(paramIndex) // Unboxed argument (if it is defined) val unboxedArg = unbox(jsArg, param.info) - if (exported.hasDefaultAt(paramIndex)) { + if (exported.hasDefaultAt(paramIndex)) // If argument is undefined and there is a default getter, call it js.If(js.BinaryOp(js.BinaryOp.===, jsArg, js.Undefined()), { genCallDefaultGetter(exported.sym, paramIndex, static, captures)(previousArgsValues) }, { unboxedArg })(unboxedArg.tpe) - } else { + else // Otherwise, it is always the unboxed argument unboxedArg - } - } - } def genCallDefaultGetter(sym: Symbol, paramIndex: Int, static: Boolean, captures: List[js.Tree])( previousArgsValues: Int => List[js.Tree])( - 
implicit pos: SourcePosition): js.Tree = { + implicit pos: SourcePosition): js.Tree = val targetSym = targetSymForDefaultGetter(sym) val defaultGetterDenot = this.defaultGetterDenot(targetSym, sym, paramIndex) @@ -729,12 +666,12 @@ final class JSExportsGen(jsCodeGen: JSCodeGen)(using Context) { assert(!defaultGetterDenot.isOverloaded, i"found overloaded default getter $defaultGetterDenot") val defaultGetter = defaultGetterDenot.symbol - val targetTree = { - if (sym.isClassConstructor || static) { - if (targetSym.isStatic) { + val targetTree = + if (sym.isClassConstructor || static) + if (targetSym.isStatic) assert(captures.isEmpty, i"expected empty captures for ${targetSym.fullName} at $pos") genLoadModule(targetSym) - } else { + else assert(captures.sizeIs == 1, "expected exactly one capture") // Find the module accessor. We cannot use memberBasedOnFlags because of scala-js/scala-js#4526. @@ -751,45 +688,39 @@ final class JSExportsGen(jsCodeGen: JSCodeGen)(using Context) { genApplyJSClassMethod(receiver, modAccessor, Nil) else genApplyMethodMaybeStatically(receiver, modAccessor, Nil) - } - } else { + else js.This()(currentThisType) - } - } // Pass previous arguments to defaultGetter val defaultGetterArgs = previousArgsValues(defaultGetter.info.paramInfoss.head.size) - val callGetter = if (targetSym.isJSType) { - if (defaultGetter.owner.isNonNativeJSClass) { + val callGetter = if (targetSym.isJSType) + if (defaultGetter.owner.isNonNativeJSClass) if (defaultGetter.hasAnnotation(jsdefn.JSOptionalAnnot)) js.Undefined() else genApplyJSClassMethod(targetTree, defaultGetter, defaultGetterArgs) - } else if (defaultGetter.owner == targetSym) { + else if (defaultGetter.owner == targetSym) /* We get here if a non-native constructor has a native companion. * This is reported on a per-class level. 
*/ assert(sym.isClassConstructor, s"got non-constructor method $sym with default method in JS native companion") js.Undefined() - } else { + else report.error( "When overriding a native method with default arguments, " + "the overriding method must explicitly repeat the default arguments.", sym.srcPos) js.Undefined() - } - } else { + else genApplyMethod(targetTree, defaultGetter, defaultGetterArgs) - } // #15419 If the getter returns void, we must "box" it by returning undefined if (callGetter.tpe == jstpe.NoType) js.Block(callGetter, js.Undefined()) else callGetter - } private def targetSymForDefaultGetter(sym: Symbol): Symbol = if (sym.isClassConstructor) sym.owner.companionModule.moduleClass @@ -803,7 +734,7 @@ final class JSExportsGen(jsCodeGen: JSCodeGen)(using Context) { /** Generate the final forwarding call to the exported method. */ private def genResult(exported: Exported, args: List[js.Tree], static: Boolean)( - implicit pos: SourcePosition): js.Tree = { + implicit pos: SourcePosition): js.Tree = val sym = exported.sym val currentClass = currentClassSym.get @@ -815,18 +746,16 @@ final class JSExportsGen(jsCodeGen: JSCodeGen)(using Context) { def boxIfNeeded(call: js.Tree): js.Tree = box(call, atPhase(elimErasedValueTypePhase)(sym.info.resultType)) - if (currentClass.isNonNativeJSClass) { + if (currentClass.isNonNativeJSClass) assert(sym.owner == currentClass, sym.fullName) boxIfNeeded(genApplyJSClassMethod(receiver, sym, args)) - } else { + else if (sym.isClassConstructor) js.New(encodeClassName(currentClass), encodeMethodSym(sym), args) else if (sym.isPrivate) boxIfNeeded(genApplyMethodStatically(receiver, sym, args)) else boxIfNeeded(genApplyMethod(receiver, sym, args)) - } - } private def genThrowTypeError(msg: String = "No matching overload")(implicit pos: Position): js.Tree = js.Throw(js.JSNew(js.JSGlobalRef("TypeError"), js.StringLiteral(msg) :: Nil)) @@ -835,55 +764,48 @@ final class JSExportsGen(jsCodeGen: JSCodeGen)(using Context) { val sym: 
Symbol, // Parameters participating in overload resolution. val params: scala.collection.immutable.IndexedSeq[JSParamInfo] - ) { + ): assert(!params.exists(_.capture), "illegal capture params in Exported") - private val paramsHasDefault = { - if (!atPhase(elimRepeatedPhase)(sym.hasDefaultParams)) { + private val paramsHasDefault = + if (!atPhase(elimRepeatedPhase)(sym.hasDefaultParams)) Vector.empty - } else { + else val targetSym = targetSymForDefaultGetter(sym) params.indices.map(i => defaultGetterDenot(targetSym, sym, i).exists) - } - } def hasDefaultAt(paramIndex: Int): Boolean = paramIndex < paramsHasDefault.size && paramsHasDefault(paramIndex) val hasRepeatedParam = params.nonEmpty && params.last.repeated - val minArgc = { + val minArgc = // Find the first default param or repeated param params .indices .find(i => hasDefaultAt(i) || params(i).repeated) .getOrElse(params.size) - } val maxNonRepeatedArgc = if (hasRepeatedParam) params.size - 1 else params.size def pos: SourcePosition = sym.sourcePos - def exportArgTypeAt(paramIndex: Int): Type = { - if (paramIndex < params.length) { + def exportArgTypeAt(paramIndex: Int): Type = + if (paramIndex < params.length) params(paramIndex).info - } else { + else assert(hasRepeatedParam, i"$sym does not have varargs nor enough params for $paramIndex") params.last.info - } - } def typeInfo: String = sym.info.toString def genBody(formalArgsRegistry: FormalArgsRegistry): js.Tree - } private class ExportedSymbol(sym: Symbol, static: Boolean) - extends Exported(sym, sym.jsParamInfos.toIndexedSeq) { + extends Exported(sym, sym.jsParamInfos.toIndexedSeq): def genBody(formalArgsRegistry: FormalArgsRegistry): js.Tree = genApplyForSingleExported(formalArgsRegistry, this, static) - } // !!! 
Hash codes of RTTypeTest are meaningless because of InstanceOfTypeTest private sealed abstract class RTTypeTest @@ -891,42 +813,36 @@ final class JSExportsGen(jsCodeGen: JSCodeGen)(using Context) { private case class PrimitiveTypeTest(tpe: jstpe.Type, rank: Int) extends RTTypeTest // !!! This class does not have a meaningful hash code - private case class InstanceOfTypeTest(tpe: Type) extends RTTypeTest { - override def equals(that: Any): Boolean = { - that match { + private case class InstanceOfTypeTest(tpe: Type) extends RTTypeTest: + override def equals(that: Any): Boolean = + that match case InstanceOfTypeTest(thatTpe) => tpe =:= thatTpe case _ => false - } - } - } private case object NoTypeTest extends RTTypeTest /** Very simple O(n²) topological sort for elements assumed to be distinct. */ - private def topoSortDistinctsWith[A <: AnyRef](coll: List[A])(lteq: (A, A) => Boolean): List[A] = { + private def topoSortDistinctsWith[A <: AnyRef](coll: List[A])(lteq: (A, A) => Boolean): List[A] = @tailrec - def loop(coll: List[A], acc: List[A]): List[A] = { + def loop(coll: List[A], acc: List[A]): List[A] = if (coll.isEmpty) acc else if (coll.tail.isEmpty) coll.head :: acc - else { + else val (lhs, rhs) = coll.span(x => !coll.forall(y => (x eq y) || !lteq(x, y))) assert(!rhs.isEmpty, s"cycle while ordering $coll") loop(lhs ::: rhs.tail, rhs.head :: acc) - } - } loop(coll, Nil) - } - private def typeTestForTpe(tpe: Type): RTTypeTest = { - tpe match { + private def typeTestForTpe(tpe: Type): RTTypeTest = + tpe match case tpe: ErasedValueType => InstanceOfTypeTest(tpe.tycon.typeSymbol.typeRef) case _ => import org.scalajs.ir.Names - (toIRType(tpe): @unchecked) match { + (toIRType(tpe): @unchecked) match case jstpe.AnyType => NoTypeTest case jstpe.NoType => PrimitiveTypeTest(jstpe.UndefType, 0) @@ -944,28 +860,23 @@ final class JSExportsGen(jsCodeGen: JSCodeGen)(using Context) { case jstpe.ClassType(_) => InstanceOfTypeTest(tpe) case jstpe.ArrayType(_) => 
InstanceOfTypeTest(tpe) - } - } - } // Group-by that does not rely on hashCode(), only equals() - O(n²) - private def groupByWithoutHashCode[A, B](coll: List[A])(f: A => B): List[(B, List[A])] = { + private def groupByWithoutHashCode[A, B](coll: List[A])(f: A => B): List[(B, List[A])] = val m = new mutable.ArrayBuffer[(B, List[A])] m.sizeHint(coll.length) - for (elem <- coll) { + for (elem <- coll) val key = f(elem) val index = m.indexWhere(_._1 == key) if (index < 0) m += ((key, List(elem))) else m(index) = (key, elem :: m(index)._2) - } m.toList - } - class FormalArgsRegistry(val minArgc: Int, needsRestParam: Boolean) { + class FormalArgsRegistry(val minArgc: Int, needsRestParam: Boolean): private val fixedParamNames: scala.collection.immutable.IndexedSeq[jsNames.LocalName] = (0 until minArgc).toIndexedSeq.map(_ => freshLocalIdent("arg")(NoPosition).name) @@ -973,53 +884,44 @@ final class JSExportsGen(jsCodeGen: JSCodeGen)(using Context) { if (needsRestParam) freshLocalIdent("rest")(NoPosition).name else null - def genFormalArgs()(implicit pos: Position): (List[js.ParamDef], Option[js.ParamDef]) = { + def genFormalArgs()(implicit pos: Position): (List[js.ParamDef], Option[js.ParamDef]) = val fixedParamDefs = fixedParamNames.toList.map { paramName => js.ParamDef(js.LocalIdent(paramName), NoOriginalName, jstpe.AnyType, mutable = false) } - val restParam = { + val restParam = if (needsRestParam) Some(js.ParamDef(js.LocalIdent(restParamName), NoOriginalName, jstpe.AnyType, mutable = false)) else None - } (fixedParamDefs, restParam) - } - def genArgRef(index: Int)(implicit pos: Position): js.Tree = { + def genArgRef(index: Int)(implicit pos: Position): js.Tree = if (index < minArgc) js.VarRef(js.LocalIdent(fixedParamNames(index)))(jstpe.AnyType) else js.JSSelect(genRestArgRef(), js.IntLiteral(index - minArgc)) - } - def genVarargRef(fixedParamCount: Int)(implicit pos: Position): js.Tree = { + def genVarargRef(fixedParamCount: Int)(implicit pos: Position): js.Tree = 
assert(fixedParamCount >= minArgc, s"genVarargRef($fixedParamCount) with minArgc = $minArgc at $pos") val restParam = genRestArgRef() if (fixedParamCount == minArgc) restParam else js.JSMethodApply(restParam, js.StringLiteral("slice"), List(js.IntLiteral(fixedParamCount - minArgc))) - } - def genRestArgRef()(implicit pos: Position): js.Tree = { + def genRestArgRef()(implicit pos: Position): js.Tree = assert(needsRestParam, s"trying to generate a reference to non-existent rest param at $pos") js.VarRef(js.LocalIdent(restParamName))(jstpe.AnyType) - } - def genAllArgsRefsForForwarder()(implicit pos: Position): List[js.TreeOrJSSpread] = { + def genAllArgsRefsForForwarder()(implicit pos: Position): List[js.TreeOrJSSpread] = val fixedArgRefs = fixedParamNames.toList.map { paramName => js.VarRef(js.LocalIdent(paramName))(jstpe.AnyType) } - if (needsRestParam) { + if (needsRestParam) val restArgRef = js.VarRef(js.LocalIdent(restParamName))(jstpe.AnyType) fixedArgRefs :+ js.JSSpread(restArgRef) - } else { + else fixedArgRefs - } - } - } -} diff --git a/compiler/src/dotty/tools/backend/sjs/JSPositions.scala b/compiler/src/dotty/tools/backend/sjs/JSPositions.scala index 2fd007165952..b61352e7cd0d 100644 --- a/compiler/src/dotty/tools/backend/sjs/JSPositions.scala +++ b/compiler/src/dotty/tools/backend/sjs/JSPositions.scala @@ -16,40 +16,35 @@ import dotty.tools.dotc.util.Spans.Span import org.scalajs.ir /** Conversion utilities from dotty Positions to IR Positions. 
*/ -class JSPositions()(using Context) { +class JSPositions()(using Context): import JSPositions._ - private val sourceURIMaps: List[URIMap] = { + private val sourceURIMaps: List[URIMap] = ctx.settings.scalajsMapSourceURI.value.flatMap { option => val uris = option.split("->") - if (uris.length != 1 && uris.length != 2) { + if (uris.length != 1 && uris.length != 2) report.error("-scalajs-mapSourceURI needs one or two URIs as argument (separated by '->').") Nil - } else { - try { + else + try val from = new URI(uris.head) val to = uris.lift(1).map(str => new URI(str)) URIMap(from, to) :: Nil - } catch { + catch case e: URISyntaxException => report.error(em"${e.getInput} is not a valid URI") Nil - } - } } - } - private def sourceAndSpan2irPos(source: SourceFile, span: Span): ir.Position = { + private def sourceAndSpan2irPos(source: SourceFile, span: Span): ir.Position = if (!span.exists) ir.Position.NoPosition - else { + else // dotty positions and IR positions are both 0-based val irSource = span2irPosCache.toIRSource(source) val point = span.point val line = source.offsetToLine(point) val column = source.column(point) ir.Position(irSource, line, column) - } - } /** Implicit conversion from dotty Span to ir.Position. 
*/ implicit def span2irPos(span: Span): ir.Position = @@ -63,22 +58,20 @@ class JSPositions()(using Context) { implicit def implicitSourcePos2irPos(implicit sourcePos: SourcePosition): ir.Position = sourceAndSpan2irPos(sourcePos.source, sourcePos.span) - private object span2irPosCache { + private object span2irPosCache: import dotty.tools.dotc.util._ private var lastDotcSource: SourceFile = null private var lastIRSource: ir.Position.SourceFile = null - def toIRSource(dotcSource: SourceFile): ir.Position.SourceFile = { - if (dotcSource != lastDotcSource) { + def toIRSource(dotcSource: SourceFile): ir.Position.SourceFile = + if (dotcSource != lastDotcSource) lastIRSource = convert(dotcSource) lastDotcSource = dotcSource - } lastIRSource - } - private def convert(dotcSource: SourceFile): ir.Position.SourceFile = { - dotcSource.file.file match { + private def convert(dotcSource: SourceFile): ir.Position.SourceFile = + dotcSource.file.file match case null => new java.net.URI( "virtualfile", // Pseudo-Scheme @@ -92,11 +85,6 @@ class JSPositions()(using Context) { val relURI = from.relativize(srcURI) to.fold(relURI)(_.resolve(relURI)) }.getOrElse(srcURI) - } - } - } -} -object JSPositions { +object JSPositions: final case class URIMap(from: URI, to: Option[URI]) -} diff --git a/compiler/src/dotty/tools/backend/sjs/JSPrimitives.scala b/compiler/src/dotty/tools/backend/sjs/JSPrimitives.scala index 029273aed54b..316c93cadac4 100644 --- a/compiler/src/dotty/tools/backend/sjs/JSPrimitives.scala +++ b/compiler/src/dotty/tools/backend/sjs/JSPrimitives.scala @@ -12,7 +12,7 @@ import dotty.tools.backend.jvm.DottyPrimitives import dotty.tools.dotc.report import dotty.tools.dotc.util.ReadOnlyMap -object JSPrimitives { +object JSPrimitives: inline val FirstJSPrimitiveCode = 300 @@ -61,9 +61,8 @@ object JSPrimitives { def isJSPrimitive(code: Int): Boolean = code >= FirstJSPrimitiveCode && code <= LastJSPrimitiveCode -} -class JSPrimitives(ictx: Context) extends DottyPrimitives(ictx) { 
+class JSPrimitives(ictx: Context) extends DottyPrimitives(ictx): import JSPrimitives._ private lazy val jsPrimitives: ReadOnlyMap[Symbol, Int] = initJSPrimitives(using ictx) @@ -81,26 +80,23 @@ class JSPrimitives(ictx: Context) extends DottyPrimitives(ictx) { jsPrimitives.contains(fun.symbol(using ictx)) || super.isPrimitive(fun) /** Initialize the primitive map */ - private def initJSPrimitives(using Context): ReadOnlyMap[Symbol, Int] = { + private def initJSPrimitives(using Context): ReadOnlyMap[Symbol, Int] = val primitives = MutableSymbolMap[Int]() // !!! Code duplicate with DottyPrimitives /** Add a primitive operation to the map */ - def addPrimitive(s: Symbol, code: Int): Unit = { + def addPrimitive(s: Symbol, code: Int): Unit = assert(!(primitives contains s), "Duplicate primitive " + s) primitives(s) = code - } - def addPrimitives(cls: Symbol, method: TermName, code: Int)(using Context): Unit = { + def addPrimitives(cls: Symbol, method: TermName, code: Int)(using Context): Unit = val alts = cls.info.member(method).alternatives.map(_.symbol) - if (alts.isEmpty) { + if (alts.isEmpty) report.error(em"Unknown primitive method $cls.$method") - } else { + else for (s <- alts) addPrimitive(s, code) - } - } val jsdefn = JSDefinitions.jsdefn @@ -145,6 +141,4 @@ class JSPrimitives(ictx: Context) extends DottyPrimitives(ictx) { addPrimitive(jsdefn.ReflectSelectable_applyDynamic, REFLECT_SELECTABLE_APPLYDYN) primitives - } -} diff --git a/compiler/src/dotty/tools/backend/sjs/ScopedVar.scala b/compiler/src/dotty/tools/backend/sjs/ScopedVar.scala index be5b8e7bb416..748dd978becf 100644 --- a/compiler/src/dotty/tools/backend/sjs/ScopedVar.scala +++ b/compiler/src/dotty/tools/backend/sjs/ScopedVar.scala @@ -1,6 +1,6 @@ package dotty.tools.backend.sjs -class ScopedVar[A](init: A) { +class ScopedVar[A](init: A): import ScopedVar.Assignment private[ScopedVar] var value = init @@ -9,30 +9,23 @@ class ScopedVar[A](init: A) { def get: A = value def :=(newValue: A): 
Assignment[A] = new Assignment(this, newValue) -} -object ScopedVar { - class Assignment[T](scVar: ScopedVar[T], value: T) { - private[ScopedVar] def push(): AssignmentStackElement[T] = { +object ScopedVar: + class Assignment[T](scVar: ScopedVar[T], value: T): + private[ScopedVar] def push(): AssignmentStackElement[T] = val stack = new AssignmentStackElement(scVar, scVar.value) scVar.value = value stack - } - } - private class AssignmentStackElement[T](scVar: ScopedVar[T], oldValue: T) { - private[ScopedVar] def pop(): Unit = { + private class AssignmentStackElement[T](scVar: ScopedVar[T], oldValue: T): + private[ScopedVar] def pop(): Unit = scVar.value = oldValue - } - } implicit def toValue[T](scVar: ScopedVar[T]): T = scVar.get - def withScopedVars[T](ass: Assignment[_]*)(body: => T): T = { + def withScopedVars[T](ass: Assignment[_]*)(body: => T): T = val stack = ass.map(_.push()) try body finally stack.reverse.foreach(_.pop()) - } final class VarBox[A](var value: A) -} diff --git a/compiler/src/dotty/tools/dotc/Bench.scala b/compiler/src/dotty/tools/dotc/Bench.scala index 5f5e9fc799b5..e3b7298e7067 100644 --- a/compiler/src/dotty/tools/dotc/Bench.scala +++ b/compiler/src/dotty/tools/dotc/Bench.scala @@ -32,11 +32,10 @@ object Bench extends Driver: System.in.nn.read() reporter - def extractNumArg(args: Array[String], name: String, default: Int = 1): (Int, Array[String]) = { + def extractNumArg(args: Array[String], name: String, default: Int = 1): (Int, Array[String]) = val pos = args indexOf name if (pos < 0) (default, args) else (args(pos + 1).toInt, (args take pos) ++ (args drop (pos + 2))) - } def reportTimes() = val best = times.sorted diff --git a/compiler/src/dotty/tools/dotc/CompilationUnit.scala b/compiler/src/dotty/tools/dotc/CompilationUnit.scala index 8415646eb16c..6766f0b862b0 100644 --- a/compiler/src/dotty/tools/dotc/CompilationUnit.scala +++ b/compiler/src/dotty/tools/dotc/CompilationUnit.scala @@ -19,7 +19,7 @@ import 
scala.annotation.internal.sharable import scala.util.control.NoStackTrace import transform.MacroAnnotations -class CompilationUnit protected (val source: SourceFile) { +class CompilationUnit protected (val source: SourceFile): override def toString: String = source.toString @@ -92,9 +92,8 @@ class CompilationUnit protected (val source: SourceFile) { def assignmentSpans(using Context): Map[Int, List[Span]] = if myAssignmentSpans == null then myAssignmentSpans = Nullables.assignmentSpans myAssignmentSpans.nn -} -@sharable object NoCompilationUnit extends CompilationUnit(NoSource) { +@sharable object NoCompilationUnit extends CompilationUnit(NoSource): override def isJava: Boolean = false @@ -102,9 +101,8 @@ class CompilationUnit protected (val source: SourceFile) { throw CompilationUnit.SuspendException() override def assignmentSpans(using Context): Map[Int, List[Span]] = Map.empty -} -object CompilationUnit { +object CompilationUnit: class SuspendException extends Exception with NoStackTrace @@ -114,46 +112,41 @@ object CompilationUnit { apply(SourceFile(file, Array.empty[Char]), unpickled, forceTrees) /** Make a compilation unit, given picked bytes and unpickled tree */ - def apply(source: SourceFile, unpickled: Tree, forceTrees: Boolean)(using Context): CompilationUnit = { + def apply(source: SourceFile, unpickled: Tree, forceTrees: Boolean)(using Context): CompilationUnit = assert(!unpickled.isEmpty, unpickled) val unit1 = new CompilationUnit(source) unit1.tpdTree = unpickled - if (forceTrees) { + if (forceTrees) val force = new Force force.traverse(unit1.tpdTree) unit1.needsStaging = force.containsQuote unit1.needsInlining = force.containsInline unit1.hasMacroAnnotations = force.containsMacroAnnotation - } unit1 - } /** Create a compilation unit corresponding to `source`. * If `mustExist` is true, this will fail if `source` does not exist. 
*/ - def apply(source: SourceFile, mustExist: Boolean = true)(using Context): CompilationUnit = { + def apply(source: SourceFile, mustExist: Boolean = true)(using Context): CompilationUnit = val src = if (!mustExist) source - else if (source.file.isDirectory) { + else if (source.file.isDirectory) report.error(em"expected file, received directory '${source.file.path}'") NoSource - } - else if (!source.file.exists) { + else if (!source.file.exists) report.error(em"source file not found: ${source.file.path}") NoSource - } else source new CompilationUnit(src) - } /** Force the tree to be loaded */ - private class Force extends TreeTraverser { + private class Force extends TreeTraverser: var containsQuote = false var containsInline = false var containsCaptureChecking = false var containsMacroAnnotation = false - def traverse(tree: Tree)(using Context): Unit = { + def traverse(tree: Tree)(using Context): Unit = if tree.symbol.is(Flags.Inline) then containsInline = true tree match @@ -172,6 +165,3 @@ object CompilationUnit { if MacroAnnotations.isMacroAnnotation(annot) then ctx.compilationUnit.hasMacroAnnotations = true traverseChildren(tree) - } - } -} diff --git a/compiler/src/dotty/tools/dotc/Compiler.scala b/compiler/src/dotty/tools/dotc/Compiler.scala index a6118732d4ae..fce0c2cac8fb 100644 --- a/compiler/src/dotty/tools/dotc/Compiler.scala +++ b/compiler/src/dotty/tools/dotc/Compiler.scala @@ -15,7 +15,7 @@ import localopt.StringInterpolatorOpt /** The central class of the dotc compiler. The job of a compiler is to create * runs, which process given `phases` in a given `rootContext`. 
*/ -class Compiler { +class Compiler: /** Meta-ordering constraint: * @@ -152,17 +152,15 @@ class Compiler { Nil var runId: Int = 1 - def nextRunId: Int = { + def nextRunId: Int = runId += 1; runId - } - def reset()(using Context): Unit = { + def reset()(using Context): Unit = ctx.base.reset() val run = ctx.run if (run != null) run.reset() - } - def newRun(using Context): Run = { + def newRun(using Context): Run = reset() val rctx = if ctx.settings.Xsemanticdb.value then @@ -170,5 +168,3 @@ class Compiler { else ctx new Run(this, rctx) - } -} diff --git a/compiler/src/dotty/tools/dotc/Driver.scala b/compiler/src/dotty/tools/dotc/Driver.scala index e548cae55ddd..89ec44608a1a 100644 --- a/compiler/src/dotty/tools/dotc/Driver.scala +++ b/compiler/src/dotty/tools/dotc/Driver.scala @@ -20,7 +20,7 @@ import fromtasty.{TASTYCompiler, TastyFileUtil} * process, but in most cases you only need to call [[process]] on the * existing object [[Main]]. */ -class Driver { +class Driver: protected def newCompiler(using Context): Compiler = if (ctx.settings.fromTasty.value) new TASTYCompiler @@ -71,7 +71,7 @@ class Driver { * this method returns a list of files to compile and an updated Context. * If compilation should be interrupted, this method returns None. 
*/ - def setup(args: Array[String], rootCtx: Context): Option[(List[AbstractFile], Context)] = { + def setup(args: Array[String], rootCtx: Context): Option[(List[AbstractFile], Context)] = val ictx = rootCtx.fresh val summary = command.distill(args, ictx.settings)(ictx.settingsState)(using ictx) ictx.setSettings(summary.sstate) @@ -79,7 +79,7 @@ class Driver { MacroClassLoader.init(ictx) Positioned.init(using ictx) - inContext(ictx) { + inContext(ictx): if !ctx.settings.YdropComments.value || ctx.settings.YreadComments.value then ictx.setProperty(ContextDoc, new ContextDocstrings) val fileNamesOrNone = command.checkUsage(summary, sourcesRequired)(using ctx.settings)(using ctx.settingsState) @@ -87,8 +87,6 @@ class Driver { val files = fileNames.map(ctx.getFile) (files, fromTastySetup(files)) } - } - } /** Setup extra classpath of tasty and jar files */ protected def fromTastySetup(files: List[AbstractFile])(using Context): Context = @@ -137,10 +135,9 @@ class Driver { * @return */ final def process(args: Array[String], simple: interfaces.SimpleReporter | Null, - callback: interfaces.CompilerCallback | Null): interfaces.ReporterResult = { + callback: interfaces.CompilerCallback | Null): interfaces.ReporterResult = val reporter = if (simple == null) null else Reporter.fromSimpleReporter(simple) process(args, reporter, callback) - } /** Principal entry point to the compiler. * @@ -156,14 +153,13 @@ class Driver { * if compilation succeeded. */ final def process(args: Array[String], reporter: Reporter | Null = null, - callback: interfaces.CompilerCallback | Null = null): Reporter = { + callback: interfaces.CompilerCallback | Null = null): Reporter = val compileCtx = initCtx.fresh if (reporter != null) compileCtx.setReporter(reporter) if (callback != null) compileCtx.setCompilerCallback(callback) process(args, compileCtx) - } /** Entry point to the compiler with no optional arguments. * @@ -191,19 +187,16 @@ class Driver { * @return The `Reporter` used. 
Use `Reporter#hasErrors` to check * if compilation succeeded. */ - def process(args: Array[String], rootCtx: Context): Reporter = { + def process(args: Array[String], rootCtx: Context): Reporter = setup(args, rootCtx) match case Some((files, compileCtx)) => doCompile(newCompiler(using compileCtx), files)(using compileCtx) case None => rootCtx.reporter - } - def main(args: Array[String]): Unit = { + def main(args: Array[String]): Unit = // Preload scala.util.control.NonFatal. Otherwise, when trying to catch a StackOverflowError, // we may try to load it but fail with another StackOverflowError and lose the original exception, // see . val _ = NonFatal sys.exit(if (process(args).hasErrors) 1 else 0) - } -} diff --git a/compiler/src/dotty/tools/dotc/Resident.scala b/compiler/src/dotty/tools/dotc/Resident.scala index 0b9bca0dc75b..b9a9dda6698f 100644 --- a/compiler/src/dotty/tools/dotc/Resident.scala +++ b/compiler/src/dotty/tools/dotc/Resident.scala @@ -23,7 +23,7 @@ import scala.annotation.tailrec * * dotc> :q // quit */ -class Resident extends Driver { +class Resident extends Driver: object residentCompiler extends Compiler @@ -33,29 +33,23 @@ class Resident extends Driver { private val reset = ":reset" private val prompt = "dotc> " - private def getLine() = { + private def getLine() = Console.print(prompt) try scala.io.StdIn.readLine() catch { case _: EOFException => quit } - } - final override def process(args: Array[String], rootCtx: Context): Reporter = { - @tailrec def loop(args: Array[String], prevCtx: Context): Reporter = { + final override def process(args: Array[String], rootCtx: Context): Reporter = + @tailrec def loop(args: Array[String], prevCtx: Context): Reporter = setup(args, prevCtx) match case Some((files, ctx)) => - inContext(ctx) { + inContext(ctx): doCompile(residentCompiler, files) - } var nextCtx = ctx var line = getLine() - while (line == reset) { + while (line == reset) nextCtx = rootCtx line = getLine() - } if line.startsWith(quit) then 
ctx.reporter else loop((line split "\\s+").asInstanceOf[Array[String]], nextCtx) case None => prevCtx.reporter - } loop(args, rootCtx) - } -} diff --git a/compiler/src/dotty/tools/dotc/Run.scala b/compiler/src/dotty/tools/dotc/Run.scala index 944ae794c94f..8df412141623 100644 --- a/compiler/src/dotty/tools/dotc/Run.scala +++ b/compiler/src/dotty/tools/dotc/Run.scala @@ -33,7 +33,7 @@ import scala.util.control.NonFatal import scala.io.Codec /** A compiler run. Exports various methods to compile source files */ -class Run(comp: Compiler, ictx: Context) extends ImplicitRunInfo with ConstraintRunInfo { +class Run(comp: Compiler, ictx: Context) extends ImplicitRunInfo with ConstraintRunInfo: /** Default timeout to stop looking for further implicit suggestions, in ms. * This is usually for the first import suggestion; subsequent suggestions @@ -82,26 +82,24 @@ class Run(comp: Compiler, ictx: Context) extends ImplicitRunInfo with Constraint mySuspendedMessages.getOrElseUpdate(warning.pos.source, mutable.LinkedHashSet.empty) += warning def nowarnAction(dia: Diagnostic): Action.Warning.type | Action.Verbose.type | Action.Silent.type = - mySuppressions.getOrElse(dia.pos.source, Nil).find(_.matches(dia)) match { + mySuppressions.getOrElse(dia.pos.source, Nil).find(_.matches(dia)) match case Some(s) => s.markUsed() if (s.verbose) Action.Verbose else Action.Silent case _ => Action.Warning - } def addSuppression(sup: Suppression): Unit = val source = sup.annotPos.source mySuppressions.getOrElseUpdate(source, mutable.ListBuffer.empty) += sup - def reportSuspendedMessages(source: SourceFile)(using Context): Unit = { + def reportSuspendedMessages(source: SourceFile)(using Context): Unit = // sort suppressions. 
they are not added in any particular order because of lazy type completion for (sups <- mySuppressions.get(source)) mySuppressions(source) = sups.sortBy(sup => 0 - sup.start) mySuppressionsComplete += source mySuspendedMessages.remove(source).foreach(_.foreach(ctx.reporter.issueIfNotSuppressed)) - } def runFinished(hasErrors: Boolean): Unit = // report suspended messages (in case the run finished before typer) @@ -146,13 +144,11 @@ class Run(comp: Compiler, ictx: Context) extends ImplicitRunInfo with Constraint * These files do not have to be source files since it's possible to compile * from TASTY. */ - def files: Set[AbstractFile] = { - if (myUnits ne myUnitsCached) { + def files: Set[AbstractFile] = + if (myUnits ne myUnitsCached) myUnitsCached = myUnits myFiles = (myUnits ++ suspendedUnits).map(_.source.file).toSet - } myFiles - } /** The source files of all late entered symbols, as a set */ private var lateFiles = mutable.Set[AbstractFile]() @@ -189,25 +185,22 @@ class Run(comp: Compiler, ictx: Context) extends ImplicitRunInfo with Constraint * account. I think the latter would be preferable. 
*/ def compileSources(sources: List[SourceFile]): Unit = - if (sources forall (_.exists)) { + if (sources forall (_.exists)) units = sources.map(CompilationUnit(_)) compileUnits() - } - def compileUnits(us: List[CompilationUnit]): Unit = { + def compileUnits(us: List[CompilationUnit]): Unit = units = us compileUnits() - } - def compileUnits(us: List[CompilationUnit], ctx: Context): Unit = { + def compileUnits(us: List[CompilationUnit], ctx: Context): Unit = units = us compileUnits()(using ctx) - } var profile: Profile = NoProfile - private def compileUnits()(using Context) = Stats.maybeMonitored { + private def compileUnits()(using Context) = Stats.maybeMonitored: if (!ctx.mode.is(Mode.Interactive)) // IDEs might have multi-threaded access, accesses are synchronized ctx.base.checkSingleThreaded() @@ -233,14 +226,14 @@ class Run(comp: Compiler, ictx: Context) extends ImplicitRunInfo with Constraint if ctx.settings.YnoDoubleBindings.value then ctx.base.checkNoDoubleBindings = true - def runPhases(using Context) = { + def runPhases(using Context) = var lastPrintedTree: PrintedTree = NoPrintedTree val profiler = ctx.profiler var phasesWereAdjusted = false for (phase <- ctx.base.allPhases) if (phase.isRunnable) - Stats.trackTime(s"$phase ms ") { + Stats.trackTime(s"$phase ms "): val start = System.currentTimeMillis val profileBefore = profiler.beforePhase(phase) units = phase.runOn(units) @@ -254,7 +247,6 @@ class Run(comp: Compiler, ictx: Context) extends ImplicitRunInfo with Constraint for (unit <- units) Stats.record(s"retained typed trees at end of $phase", unit.tpdTree.treeSize) ctx.typerState.gc() - } if !phasesWereAdjusted then phasesWereAdjusted = true if !Feature.ccEnabledSomewhere then @@ -262,7 +254,6 @@ class Run(comp: Compiler, ictx: Context) extends ImplicitRunInfo with Constraint ctx.base.unlinkPhaseAsDenotTransformer(Phases.checkCapturesPhase) profiler.finished() - } val runCtx = ctx.fresh runCtx.setProfiler(Profiler()) @@ -271,12 +262,10 @@ class 
Run(comp: Compiler, ictx: Context) extends ImplicitRunInfo with Constraint if (!ctx.reporter.hasErrors) Rewrites.writeBack() suppressions.runFinished(hasErrors = ctx.reporter.hasErrors) - while (finalizeActions.nonEmpty) { + while (finalizeActions.nonEmpty) val action = finalizeActions.remove(0) action() - } compiling = false - } /** Enter top-level definitions of classes and objects contained in source file `file`. * The newly added symbols replace any previously entered symbols. @@ -284,7 +273,7 @@ class Run(comp: Compiler, ictx: Context) extends ImplicitRunInfo with Constraint * `rootTreeOrProvider`. */ def lateCompile(file: AbstractFile, typeCheck: Boolean)(using Context): Unit = - if (!files.contains(file) && !lateFiles.contains(file)) { + if (!files.contains(file) && !lateFiles.contains(file)) lateFiles += file val unit = CompilationUnit(ctx.getSource(file)) @@ -300,20 +289,19 @@ class Run(comp: Compiler, ictx: Context) extends ImplicitRunInfo with Constraint ) process()(using unitCtx) - } private sealed trait PrintedTree private /*final*/ case class SomePrintedTree(phase: String, tree: String) extends PrintedTree private object NoPrintedTree extends PrintedTree - private def printTree(last: PrintedTree)(using Context): PrintedTree = { + private def printTree(last: PrintedTree)(using Context): PrintedTree = val unit = ctx.compilationUnit val fusedPhase = ctx.phase.prevMega val echoHeader = f"[[syntax trees at end of $fusedPhase%25s]] // ${unit.source}" val tree = if ctx.isAfterTyper then unit.tpdTree else unit.untpdTree val treeString = fusedPhase.show(tree) - last match { + last match case SomePrintedTree(phase, lastTreeString) if lastTreeString == treeString => report.echo(s"$echoHeader: unchanged since $phase") last @@ -326,40 +314,34 @@ class Run(comp: Compiler, ictx: Context) extends ImplicitRunInfo with Constraint case _ => report.echo(s"$echoHeader\n$treeString\n") SomePrintedTree(fusedPhase.phaseName, treeString) - } - } - def 
compileFromStrings(scalaSources: List[String], javaSources: List[String] = Nil): Unit = { - def sourceFile(source: String, isJava: Boolean): SourceFile = { + def compileFromStrings(scalaSources: List[String], javaSources: List[String] = Nil): Unit = + def sourceFile(source: String, isJava: Boolean): SourceFile = val uuid = java.util.UUID.randomUUID().toString val ext = if (isJava) "java" else "scala" val name = s"compileFromString-$uuid.$ext" SourceFile.virtual(name, source) - } val sources = scalaSources.map(sourceFile(_, isJava = false)) ++ - javaSources.map(sourceFile(_, isJava = true)) + javaSources.map(sourceFile(_, isJava = true)) compileSources(sources) - } /** Print summary of warnings and errors encountered */ - def printSummary(): Unit = { + def printSummary(): Unit = printMaxConstraint() val r = runContext.reporter if !r.errorsReported then profile.printSummary() r.summarizeUnreportedWarnings() r.printSummary() - } - override def reset(): Unit = { + override def reset(): Unit = super[ImplicitRunInfo].reset() super[ConstraintRunInfo].reset() myCtx = null myUnits = Nil myUnitsCached = Nil - } /** Produces the following contexts, from outermost to innermost * @@ -369,7 +351,7 @@ class Run(comp: Compiler, ictx: Context) extends ImplicitRunInfo with Constraint * for type checking. 
* imports For each element of RootImports, an import context */ - protected def rootContext(using Context): Context = { + protected def rootContext(using Context): Context = ctx.initialize() ctx.base.setPhasePlan(comp.phases) val rootScope = new MutableScope(0) @@ -389,16 +371,14 @@ class Run(comp: Compiler, ictx: Context) extends ImplicitRunInfo with Constraint // `this` must be unchecked for safe initialization because by being passed to setRun during // initialization, it is not yet considered fully initialized by the initialization checker start.setRun(this: @unchecked) - } private var myCtx: Context | Null = rootContext(using ictx) /** The context created for this run */ given runContext[Dummy_so_its_a_def]: Context = myCtx.nn assert(runContext.runId <= Periods.MaxPossibleRunId) -} -object Run { +object Run: extension (run: Run | Null) def enrichedErrorMessage: Boolean = if run == null then false else run.myEnrichedErrorMessage def enrichErrorMessage(errorMessage: String)(using Context): String = @@ -409,4 +389,3 @@ object Run { report.enrichErrorMessage(errorMessage) else errorMessage -} diff --git a/compiler/src/dotty/tools/dotc/ast/Desugar.scala b/compiler/src/dotty/tools/dotc/ast/Desugar.scala index 399215420e65..a0e712cbd4cd 100644 --- a/compiler/src/dotty/tools/dotc/ast/Desugar.scala +++ b/compiler/src/dotty/tools/dotc/ast/Desugar.scala @@ -20,7 +20,7 @@ import config.Printers import scala.annotation.internal.sharable -object desugar { +object desugar: import untpd._ import DesugarEnums._ @@ -45,18 +45,16 @@ object desugar { val UntupledParam: Property.Key[Unit] = Property.StickyKey() /** What static check should be applied to a Match? */ - enum MatchCheck { + enum MatchCheck: case None, Exhaustive, IrrefutablePatDef, IrrefutableGenFrom - } /** Is `name` the name of a method that can be invalidated as a compiler-generated * case class method if it clashes with a user-defined method? 
*/ - def isRetractableCaseClassMethodName(name: Name)(using Context): Boolean = name match { + def isRetractableCaseClassMethodName(name: Name)(using Context): Boolean = name match case nme.apply | nme.unapply | nme.unapplySeq | nme.copy => true case DefaultGetterName(nme.copy, _) => true case _ => false - } /** Is `name` the name of a method that is added unconditionally to case classes? */ def isDesugaredCaseClassMethodName(name: Name)(using Context): Boolean = @@ -64,40 +62,34 @@ object desugar { // ----- DerivedTypeTrees ----------------------------------- - class SetterParamTree(implicit @constructorOnly src: SourceFile) extends DerivedTypeTree { + class SetterParamTree(implicit @constructorOnly src: SourceFile) extends DerivedTypeTree: def derivedTree(sym: Symbol)(using Context): tpd.TypeTree = tpd.TypeTree(sym.info.resultType) - } - class TypeRefTree(implicit @constructorOnly src: SourceFile) extends DerivedTypeTree { + class TypeRefTree(implicit @constructorOnly src: SourceFile) extends DerivedTypeTree: def derivedTree(sym: Symbol)(using Context): tpd.TypeTree = tpd.TypeTree(sym.typeRef) - } - class TermRefTree(implicit @constructorOnly src: SourceFile) extends DerivedTypeTree { + class TermRefTree(implicit @constructorOnly src: SourceFile) extends DerivedTypeTree: def derivedTree(sym: Symbol)(using Context): tpd.Tree = tpd.ref(sym) - } /** A type tree that computes its type from an existing parameter. */ - class DerivedFromParamTree()(implicit @constructorOnly src: SourceFile) extends DerivedTypeTree { + class DerivedFromParamTree()(implicit @constructorOnly src: SourceFile) extends DerivedTypeTree: /** Complete the appropriate constructors so that OriginalSymbol attachments are * pushed to DerivedTypeTrees. 
*/ - override def ensureCompletions(using Context): Unit = { + override def ensureCompletions(using Context): Unit = def completeConstructor(sym: Symbol) = - sym.infoOrCompleter match { + sym.infoOrCompleter match case completer: Namer#ClassCompleter => completer.completeConstructor(sym) case _ => - } if (!ctx.owner.is(Package)) - if (ctx.owner.isClass) { + if (ctx.owner.isClass) completeConstructor(ctx.owner) if (ctx.owner.is(ModuleClass)) completeConstructor(ctx.owner.linkedClass) - } else ensureCompletions(using ctx.outer) - } /** Return info of original symbol, where all references to siblings of the * original symbol (i.e. sibling and original symbol have the same owner) @@ -108,26 +100,21 @@ object desugar { * accessor of a type parameter is a private type alias that cannot be accessed * from subclasses. */ - def derivedTree(sym: Symbol)(using Context): tpd.TypeTree = { - val relocate = new TypeMap { + def derivedTree(sym: Symbol)(using Context): tpd.TypeTree = + val relocate = new TypeMap: val originalOwner = sym.owner - def apply(tp: Type) = tp match { + def apply(tp: Type) = tp match case tp: NamedType if tp.symbol.exists && (tp.symbol.owner eq originalOwner) => val defctx = mapCtx.outersIterator.dropWhile(_.scope eq mapCtx.scope).next() var local = defctx.denotNamed(tp.name).suchThat(_.isParamOrAccessor).symbol if (local.exists) (defctx.owner.thisType select local).dealiasKeepAnnots - else { + else def msg = em"no matching symbol for ${tp.symbol.showLocated} in ${defctx.owner} / ${defctx.effectiveScope.toList}" ErrorType(msg).assertingErrorsReported(msg) - } case _ => mapOver(tp) - } - } tpd.TypeTree(relocate(sym.info)) - } - } /** A type definition copied from `tdef` with a rhs typetree derived from it */ def derivedTypeParam(tdef: TypeDef)(using Context): TypeDef = @@ -152,12 +139,11 @@ object desugar { * - all trait members * - all package object members */ - def isSetterNeeded(valDef: ValDef)(using Context): Boolean = { + def isSetterNeeded(valDef: 
ValDef)(using Context): Boolean = val mods = valDef.mods mods.is(Mutable) && ctx.owner.isClass && (!mods.is(Private) || ctx.owner.is(Trait) || ctx.owner.isPackageObject) - } /** var x: Int = expr * ==> @@ -203,20 +189,18 @@ object desugar { end valDef def makeImplicitParameters(tpts: List[Tree], implicitFlag: FlagSet, forPrimaryConstructor: Boolean = false)(using Context): List[ValDef] = - for (tpt <- tpts) yield { + for (tpt <- tpts) yield val paramFlags: FlagSet = if (forPrimaryConstructor) LocalParamAccessor else Param val epname = EvidenceParamName.fresh() ValDef(epname, tpt, EmptyTree).withFlags(paramFlags | implicitFlag) - } def mapParamss(paramss: List[ParamClause]) (mapTypeParam: TypeDef => TypeDef) (mapTermParam: ValDef => ValDef)(using Context): List[ParamClause] = - paramss.mapConserve { + paramss.mapConserve: case TypeDefs(tparams) => tparams.mapConserve(mapTypeParam) case ValDefs(vparams) => vparams.mapConserve(mapTermParam) case _ => unreachable() - } /** 1. Expand context bounds to evidence params. 
E.g., * @@ -290,11 +274,10 @@ object desugar { rhs cpy.TypeDef(tparam)(rhs = dropInRhs(tparam.rhs)) - def paramssNoRHS = mapParamss(meth.paramss)(identity) { + def paramssNoRHS = mapParamss(meth.paramss)(identity): vparam => if vparam.rhs.isEmpty then vparam else cpy.ValDef(vparam)(rhs = EmptyTree).withMods(vparam.mods | HasDefault) - } def getterParamss(n: Int): List[ParamClause] = mapParamss(takeUpTo(paramssNoRHS, n)) { @@ -335,8 +318,8 @@ object desugar { * * Note that the splice `$t: T` will be typed as `${t: Expr[T]}` */ - def quotedPattern(tree: untpd.Tree, expectedTpt: untpd.Tree)(using Context): untpd.Tree = { - def adaptToExpectedTpt(tree: untpd.Tree): untpd.Tree = tree match { + def quotedPattern(tree: untpd.Tree, expectedTpt: untpd.Tree)(using Context): untpd.Tree = + def adaptToExpectedTpt(tree: untpd.Tree): untpd.Tree = tree match // Add the expected type as an ascription case _: untpd.SplicePattern => untpd.Typed(tree, expectedTpt).withSpan(tree.span) @@ -359,9 +342,7 @@ object desugar { // Tree does not need to be ascribed case _ => tree - } adaptToExpectedTpt(tree) - } /** Add all evidence parameters in `params` as implicit parameters to `meth`. 
* If the parameters of `meth` end in an implicit parameter list or using clause, @@ -382,26 +363,23 @@ object desugar { /** The implicit evidence parameters of `meth`, as generated by `desugar.defDef` */ private def evidenceParams(meth: DefDef)(using Context): List[ValDef] = - meth.paramss.reverse match { + meth.paramss.reverse match case ValDefs(vparams @ (vparam :: _)) :: _ if vparam.mods.isOneOf(GivenOrImplicit) => vparams.takeWhile(_.name.is(EvidenceParamName)) case _ => Nil - } @sharable private val synthetic = Modifiers(Synthetic) - private def toDefParam(tparam: TypeDef, keepAnnotations: Boolean): TypeDef = { + private def toDefParam(tparam: TypeDef, keepAnnotations: Boolean): TypeDef = var mods = tparam.rawMods if (!keepAnnotations) mods = mods.withAnnotations(Nil) tparam.withMods(mods & EmptyFlags | Param) - } - private def toDefParam(vparam: ValDef, keepAnnotations: Boolean, keepDefault: Boolean): ValDef = { + private def toDefParam(vparam: ValDef, keepAnnotations: Boolean, keepDefault: Boolean): ValDef = var mods = vparam.rawMods if (!keepAnnotations) mods = mods.withAnnotations(Nil) val hasDefault = if keepDefault then HasDefault else EmptyFlags vparam.withMods(mods & (GivenOrImplicit | Erased | hasDefault) | Param) - } def mkApply(fn: Tree, paramss: List[ParamClause])(using Context): Tree = paramss.foldLeft(fn) { (fn, params) => params match @@ -426,12 +404,11 @@ object desugar { var defaultGetters: List[Tree] = Nil - def decompose(ddef: Tree): DefDef = ddef match { + def decompose(ddef: Tree): DefDef = ddef match case meth: DefDef => meth case Thicket((meth: DefDef) :: defaults) => defaultGetters = defaults meth - } val constr1 = decompose(defDef(impl.constr, isPrimaryConstructor = true)) @@ -441,16 +418,14 @@ object desugar { // go in `constr`, the constructor after desugaring. /** Does `tree' look like a reference to AnyVal? 
Temporary test before we have inline classes */ - def isAnyVal(tree: Tree): Boolean = tree match { + def isAnyVal(tree: Tree): Boolean = tree match case Ident(tpnme.AnyVal) => true case Select(qual, tpnme.AnyVal) => isScala(qual) case _ => false - } - def isScala(tree: Tree): Boolean = tree match { + def isScala(tree: Tree): Boolean = tree match case Ident(nme.scala) => true case Select(Ident(nme.ROOTPKG), nme.scala) => true case _ => false - } def namePos = cdef.sourcePos.withSpan(cdef.nameSpan) @@ -467,13 +442,12 @@ object desugar { val originalVparamss = asTermOnly(constr1.trailingParamss) lazy val derivedEnumParams = enumClass.typeParams.map(derivedTypeParamWithVariance) val impliedTparams = - if (isEnumCase) { + if (isEnumCase) val tparamReferenced = typeParamIsReferenced( enumClass.typeParams, originalTparams, originalVparamss, parents) if (originalTparams.isEmpty && (parents.isEmpty || tparamReferenced)) derivedEnumParams.map(tdef => tdef.withFlags(tdef.mods.flags | PrivateLocal)) else originalTparams - } else originalTparams if mods.is(Trait) then @@ -486,15 +460,13 @@ object desugar { // annotations on class _value_ parameters. 
val constrTparams = impliedTparams.map(toDefParam(_, keepAnnotations = false)) val constrVparamss = - if (originalVparamss.isEmpty) { // ensure parameter list is non-empty + if (originalVparamss.isEmpty) // ensure parameter list is non-empty if (isCaseClass) report.error(CaseClassMissingParamList(cdef), namePos) ListOfNil - } - else if (isCaseClass && originalVparamss.head.exists(_.mods.isOneOf(GivenOrImplicit))) { + else if (isCaseClass && originalVparamss.head.exists(_.mods.isOneOf(GivenOrImplicit))) report.error(CaseClassMissingNonImplicitParamList(cdef), namePos) ListOfNil - } else originalVparamss.nestedMap(toDefParam(_, keepAnnotations = true, keepDefault = true)) val derivedTparams = constrTparams.zipWithConserve(impliedTparams)((tparam, impliedParam) => @@ -505,10 +477,10 @@ object desugar { val constr = cpy.DefDef(constr1)(paramss = joinParams(constrTparams, constrVparamss)) - val (normalizedBody, enumCases, enumCompanionRef) = { + val (normalizedBody, enumCases, enumCompanionRef) = // Add constructor type parameters and evidence implicit parameters // to auxiliary constructors; set defaultGetters as a side effect. 
- def expandConstructor(tree: Tree) = tree match { + def expandConstructor(tree: Tree) = tree match case ddef: DefDef if ddef.name.isConstructorName => decompose( defDef( @@ -517,26 +489,23 @@ object desugar { evidenceParams(constr1).map(toDefParam(_, keepAnnotations = false, keepDefault = false))))) case stat => stat - } // The Identifiers defined by a case - def caseIds(tree: Tree): List[Ident] = tree match { + def caseIds(tree: Tree): List[Ident] = tree match case tree: MemberDef => Ident(tree.name.toTermName) :: Nil case PatDef(_, ids: List[Ident] @ unchecked, _, _) => ids - } val stats0 = impl.body.map(expandConstructor) val stats = if (ctx.owner eq defn.ScalaPackageClass) && defn.hasProblematicGetClass(className) then - stats0.filterConserve { + stats0.filterConserve: case ddef: DefDef => ddef.name ne nme.getClass_ case _ => true - } else stats0 - if (isEnum) { + if (isEnum) val (enumCases, enumStats) = stats.partition(DesugarEnums.isEnumCase) if (enumCases.isEmpty) report.error(EnumerationsShouldNotBeEmpty(cdef), namePos) @@ -550,9 +519,7 @@ object desugar { ) ) (enumImport :: enumStats, enumCases, enumCompanionRef) - } else (stats, Nil, EmptyTree) - } def anyRef = ref(defn.AnyRefAlias.typeRef) @@ -564,32 +531,27 @@ object desugar { (if (args.isEmpty) tycon else AppliedTypeTree(tycon, args)) .withSpan(cdef.span.startPos) - def isHK(tparam: Tree): Boolean = tparam match { + def isHK(tparam: Tree): Boolean = tparam match case TypeDef(_, LambdaTypeTree(tparams, body)) => true case TypeDef(_, rhs: DerivedTypeTree) => isHK(rhs.watched) case _ => false - } - def appliedRef(tycon: Tree, tparams: List[TypeDef] = constrTparams, widenHK: Boolean = false) = { - val targs = for (tparam <- tparams) yield { + def appliedRef(tycon: Tree, tparams: List[TypeDef] = constrTparams, widenHK: Boolean = false) = + val targs = for (tparam <- tparams) yield val targ = refOfDef(tparam) - def fullyApplied(tparam: Tree): Tree = tparam match { + def fullyApplied(tparam: Tree): Tree = 
tparam match case TypeDef(_, LambdaTypeTree(tparams, body)) => AppliedTypeTree(targ, tparams.map(_ => WildcardTypeBoundsTree())) case TypeDef(_, rhs: DerivedTypeTree) => fullyApplied(rhs.watched) case _ => targ - } if (widenHK) fullyApplied(tparam) else targ - } appliedTypeTree(tycon, targs) - } - def isRepeated(tree: Tree): Boolean = stripByNameType(tree) match { + def isRepeated(tree: Tree): Boolean = stripByNameType(tree) match case PostfixOp(_, Ident(tpnme.raw.STAR)) => true case _ => false - } // a reference to the class type bound by `cdef`, with type parameters coming from the constructor val classTypeRef = appliedRef(classTycon) @@ -600,11 +562,10 @@ object desugar { enumClassRef else if (originalTparams.isEmpty) appliedRef(enumClassRef) - else { + else report.error(TypedCaseDoesNotExplicitlyExtendTypedEnum(enumClass, cdef) , cdef.srcPos.startPos) appliedTypeTree(enumClassRef, constrTparams map (_ => anyRef)) - } // new C[Ts](paramss) lazy val creatorExpr = @@ -620,10 +581,9 @@ object desugar { else constrVparamss val nu = vparamss.foldLeft(makeNew(classTypeRef)) { (nu, vparams) => val app = Apply(nu, vparams.map(refOfDef)) - vparams match { + vparams match case vparam :: _ if vparam.mods.is(Given) => app.setApplyKind(ApplyKind.Using) case _ => app - } } ensureApplied(nu) @@ -635,7 +595,7 @@ object desugar { // def _N: TN = this.pN (unless already given as valdef or parameterless defdef) // def copy(p1: T1 = p1..., pN: TN = pN)(moreParams) = // new C[...](p1, ..., pN)(moreParams) - val (caseClassMeths, enumScaffolding) = { + val (caseClassMeths, enumScaffolding) = def syntheticProperty(name: TermName, tpt: Tree, rhs: Tree) = val mods = if ctx.settings.Yscala2Stdlib.value then synthetic | Inline @@ -644,12 +604,11 @@ object desugar { def productElemMeths = val caseParams = derivedVparamss.head.toArray - val selectorNamesInBody = normalizedBody.collect { + val selectorNamesInBody = normalizedBody.collect: case vdef: ValDef if vdef.name.isSelectorName => 
vdef.name case ddef: DefDef if ddef.name.isSelectorName && ddef.paramss.isEmpty => ddef.name - } for i <- List.range(0, arity) selName = nme.selectorName(i) if (selName ne caseParams(i).name) && !selectorNamesInBody.contains(selName) @@ -661,12 +620,11 @@ object desugar { val (ordinal, scaffolding) = nextOrdinal(className, CaseKind.Class, definesEnumLookupMethods(cdef)) (ordinalMethLit(ordinal) :: Nil, scaffolding) else (Nil, Nil) - def copyMeths = { - val hasRepeatedParam = constrVparamss.nestedExists { + def copyMeths = + val hasRepeatedParam = constrVparamss.nestedExists: case ValDef(_, tpt, _) => isRepeated(tpt) - } if (mods.is(Abstract) || hasRepeatedParam) Nil // cannot have default arguments for repeated parameters, hence copy method is not issued - else { + else val copyFirstParams = derivedVparamss.head.map(vparam => cpy.ValDef(vparam)(rhs = refOfDef(vparam))) val copyRestParamss = derivedVparamss.tail.nestedMap(vparam => @@ -677,14 +635,11 @@ object desugar { TypeTree(), creatorExpr ).withMods(Modifiers(Synthetic | constr1.mods.flags & copiedAccessFlags, constr1.mods.privateWithin)) :: Nil - } - } if isCaseClass then val (enumMeths, enumScaffolding) = enumCaseMeths (copyMeths ::: enumMeths ::: productElemMeths, enumScaffolding) else (Nil, Nil) - } var parents1 = parents if (isEnumCase && parents.isEmpty) @@ -700,7 +655,7 @@ object desugar { // The thicket which is the desugared version of the companion object // synthetic object C extends parentTpt derives class-derived { defs } - def companionDefs(parentTpt: Tree, defs: List[Tree]) = { + def companionDefs(parentTpt: Tree, defs: List[Tree]) = val mdefs = moduleDef( ModuleDef( className.toTermName, Template(emptyConstructor, parentTpt :: Nil, companionDerived, EmptyValDef, defs)) @@ -710,7 +665,6 @@ object desugar { for (case modClsDef @ TypeDef(_, _) <- mdefs) modClsDef.putAttachment(DerivingCompanion, impl.srcPos.startPos) mdefs - } val companionMembers = defaultGetters ::: enumCases @@ -725,10 +679,10 
@@ object desugar { // (T11, ..., T1N) => ... => (TM1, ..., TMN) => C // For all other classes, the parent is AnyRef. val companions = - if (isCaseClass) { + if (isCaseClass) val applyMeths = if (mods.is(Abstract)) Nil - else { + else val appMods = Modifiers(Synthetic | constr1.mods.flags & copiedAccessFlags).withPrivateWithin(constr1.mods.privateWithin) val appParamss = @@ -736,8 +690,7 @@ object desugar { ap.withMods(ap.mods | (cp.mods.flags & HasDefault))) DefDef(nme.apply, joinParams(derivedTparams, appParamss), TypeTree(), creatorExpr) .withMods(appMods) :: Nil - } - val unapplyMeth = { + val unapplyMeth = def scala2LibCompatUnapplyRhs(unapplyParamName: Name) = assert(arity <= Definitions.MaxTupleArity, "Unexpected case class with tuple larger than 22: "+ cdef.show) if arity == 1 then Apply(scalaDot(nme.Option), Select(Ident(unapplyParamName), nme._1)) @@ -746,9 +699,8 @@ object desugar { val members = List.tabulate(arity) { n => Select(Ident(unapplyParamName), s"_${n+1}".toTermName) } Apply(scalaDot(nme.Option), Apply(tupleApply, members)) - val hasRepeatedParam = constrVparamss.head.exists { + val hasRepeatedParam = constrVparamss.head.exists: case ValDef(_, tpt, _) => isRepeated(tpt) - } val methName = if (hasRepeatedParam) nme.unapplySeq else nme.unapply val unapplyParam = makeSyntheticParameter(tpt = classTypeRef) val unapplyRHS = @@ -763,24 +715,21 @@ object desugar { unapplyResTp, unapplyRHS ).withMods(synthetic) - } val toStringMeth = DefDef(nme.toString_, Nil, TypeTree(), Literal(Constant(className.toString))).withMods(Modifiers(Override | Synthetic)) companionDefs(anyRef, applyMeths ::: unapplyMeth :: toStringMeth :: companionMembers) - } else if (companionMembers.nonEmpty || companionDerived.nonEmpty || isEnum) companionDefs(anyRef, companionMembers) else if (isValueClass) companionDefs(anyRef, Nil) else Nil - enumCompanionRef match { + enumCompanionRef match case ref: TermRefTree => // have the enum import watch the companion object val (modVal: 
ValDef) :: _ = companions: @unchecked ref.watching(modVal) case _ => - } // For an implicit class C[Ts](p11: T11, ..., p1N: T1N) ... (pM1: TM1, .., pMN: TMN), the method // synthetic implicit C[Ts](p11: T11, ..., p1N: T1N) ... (pM1: TM1, ..., pMN: TMN): C[Ts] = @@ -788,29 +737,24 @@ object desugar { val implicitWrappers = if (!mods.isOneOf(GivenOrImplicit)) Nil - else if (ctx.owner.is(Package)) { + else if (ctx.owner.is(Package)) report.error(TopLevelImplicitClass(cdef), cdef.srcPos) Nil - } - else if (mods.is(Trait)) { + else if (mods.is(Trait)) report.error(TypesAndTraitsCantBeImplicit(), cdef.srcPos) Nil - } - else if (isCaseClass) { + else if (isCaseClass) report.error(ImplicitCaseClass(cdef), cdef.srcPos) Nil - } - else if (arity != 1 && !mods.is(Given)) { + else if (arity != 1 && !mods.is(Given)) report.error(ImplicitClassPrimaryConstructorArity(), cdef.srcPos) Nil - } - else { - val defParamss = constrVparamss match { + else + val defParamss = constrVparamss match case Nil :: paramss => paramss // drop leading () that got inserted by class // TODO: drop this once we do not silently insert empty class parameters anymore case paramss => paramss - } // implicit wrapper is typechecked in same scope as constructor, so // we can reuse the constructor parameters; no derived params are needed. 
DefDef( @@ -818,30 +762,25 @@ object desugar { classTypeRef, creatorExpr) .withMods(companionMods | mods.flags.toTermFlags & (GivenOrImplicit | Inline) | Final) .withSpan(cdef.span) :: Nil - } - val self1 = { + val self1 = val selfType = if (self.tpt.isEmpty) classTypeRef else self.tpt if (self.isEmpty) self else cpy.ValDef(self)(tpt = selfType).withMods(self.mods | SelfName) - } - val cdef1 = addEnumFlags { - val tparamAccessors = { + val cdef1 = addEnumFlags: + val tparamAccessors = val impliedTparamsIt = impliedTparams.iterator derivedTparams.map(_.withMods(impliedTparamsIt.next().mods)) - } val caseAccessor = if (isCaseClass) CaseAccessor else EmptyFlags - val vparamAccessors = { + val vparamAccessors = val originalVparamsIt = originalVparamss.iterator.flatten - derivedVparamss match { + derivedVparamss match case first :: rest => first.map(_.withMods(originalVparamsIt.next().mods | caseAccessor)) ++ rest.flatten.map(_.withMods(originalVparamsIt.next().mods)) case _ => Nil - } - } if mods.isAllOf(Given | Inline | Transparent) then report.error("inline given instances cannot be trasparent", cdef) val classMods = if mods.is(Given) then mods &~ (Inline | Transparent) | Synthetic else mods @@ -850,13 +789,11 @@ object desugar { rhs = cpy.Template(impl)(constr, parents1, clsDerived, self1, tparamAccessors ::: vparamAccessors ::: normalizedBody ::: caseClassMeths) ).withMods(classMods) - } // install the watch on classTycon - classTycon match { + classTycon match case tycon: DerivedTypeTree => tycon.watching(cdef1) case _ => - } flatTree(cdef1 :: companions ::: implicitWrappers ::: enumScaffolding) }.showing(i"desugared: $cdef --> $result", Printers.desugar) @@ -891,7 +828,7 @@ object desugar { * val name: name$ = New(name$) * final class name$ extends parents { self: name.type => body } */ - def moduleDef(mdef: ModuleDef)(using Context): Tree = { + def moduleDef(mdef: ModuleDef)(using Context): Tree = val impl = mdef.impl val mods = mdef.mods val moduleName = 
normalizeName(mdef, impl).asTermName @@ -900,12 +837,11 @@ object desugar { if (mods.is(Package)) packageModuleDef(mdef) - else if (isEnumCase) { + else if (isEnumCase) typeParamIsReferenced(enumClass.typeParams, Nil, Nil, impl.parents) // used to check there are no illegal references to enum's type parameters in parents expandEnumModule(moduleName, impl, mods, definesEnumLookupMethods(mdef), mdef.span) - } - else { + else val clsName = moduleName.moduleClassName val clsRef = Ident(clsName) val modul = ValDef(moduleName, clsRef, New(clsRef, Nil)) @@ -922,8 +858,6 @@ object desugar { .withMods(mods.toTypeFlags & RetainedModuleClassFlags | ModuleClassCreationFlags) .withEndMarker(copyFrom = mdef) // copy over the end marker position to the module class def Thicket(modul, classDef(cls).withSpan(mdef.span)) - } - } def extMethod(mdef: DefDef, extParamss: List[ParamClause])(using Context): DefDef = cpy.DefDef(mdef)( @@ -965,12 +899,10 @@ object desugar { ).withMods(mdef.mods | ExtensionMethod) /** Transform extension construct to list of extension methods */ - def extMethods(ext: ExtMethods)(using Context): Tree = flatTree { - ext.methods map { + def extMethods(ext: ExtMethods)(using Context): Tree = flatTree: + ext.methods map: case exp: Export => exp case mdef: DefDef => defDef(extMethod(mdef, ext.paramss)) - } - } /** Transforms * * type t >: Low <: Hi @@ -980,7 +912,7 @@ object desugar { * * if the type has a pattern variable name */ - def quotedPatternTypeDef(tree: TypeDef)(using Context): TypeDef = { + def quotedPatternTypeDef(tree: TypeDef)(using Context): TypeDef = assert(ctx.mode.is(Mode.QuotedPattern)) if tree.name.isVarPattern && !tree.isBackquoted then val patternTypeAnnot = New(ref(defn.QuotedRuntimePatterns_patternTypeAnnot.typeRef)).withSpan(tree.span) @@ -994,7 +926,6 @@ object desugar { tree.srcPos) tree else tree - } def checkPackageName(mdef: ModuleDef | PackageDef)(using Context): Unit = @@ -1023,17 +954,15 @@ object desugar { * 2. 
If the name is missing (this can be the case for instance definitions), * invent one instead. */ - def normalizeName(mdef: MemberDef, impl: Tree)(using Context): Name = { + def normalizeName(mdef: MemberDef, impl: Tree)(using Context): Name = var name = mdef.name if (name.isEmpty) name = name.likeSpaced(inventGivenOrExtensionName(impl)) def errPos = mdef.source.atSpan(mdef.nameSpan) - if (ctx.owner == defn.ScalaPackageClass && defn.reservedScalaClassNames.contains(name.toTypeName)) { + if (ctx.owner == defn.ScalaPackageClass && defn.reservedScalaClassNames.contains(name.toTypeName)) val kind = if (name.isTypeName) "class" else "object" report.error(IllegalRedefinitionOfStandardKind(kind, name), errPos) name = name.errorName - } name - } /** Invent a name for an anonympus given of type or template `impl`. */ def inventGivenOrExtensionName(impl: Tree)(using Context): SimpleName = @@ -1048,12 +977,12 @@ object desugar { "given_" ++ inventTypeName(impl) str.toTermName.asSimpleName - private class NameExtractor(followArgs: Boolean) extends UntypedTreeAccumulator[String] { + private class NameExtractor(followArgs: Boolean) extends UntypedTreeAccumulator[String]: private def extractArgs(args: List[Tree])(using Context): String = args.map(argNameExtractor.apply("", _)).mkString("_") override def apply(x: String, tree: Tree)(using Context): String = if (x.isEmpty) - tree match { + tree match case Select(pre, nme.CONSTRUCTOR) => foldOver(x, pre) case tree: RefTree => if tree.name.isTypeName then tree.name.toString @@ -1072,9 +1001,7 @@ object desugar { if followArgs then s"${extractArgs(tree.args)}_to_${apply("", tree.body)}" else "Function" case _ => foldOver(x, tree) - } else x - } private val typeNameExtractor = NameExtractor(followArgs = true) private val argNameExtractor = NameExtractor(followArgs = false) @@ -1094,7 +1021,7 @@ object desugar { * ==> * expandSimpleEnumCase([case e1]); ...; expandSimpleEnumCase([case eN]) */ - def patDef(pdef: PatDef)(using Context): 
Tree = flatTree { + def patDef(pdef: PatDef)(using Context): Tree = flatTree: val PatDef(mods, pats, tpt, rhs) = pdef if mods.isEnumCase then def expand(id: Ident, definesLookups: Boolean) = @@ -1106,11 +1033,9 @@ object desugar { ids.init.map(expand(_, false)) ::: expand(ids.last, true) :: Nil else ids.map(expand(_, false)) - else { + else val pats1 = if (tpt.isEmpty) pats else pats map (Typed(_, tpt)) pats1 map (makePatDef(pdef, mods, _, rhs)) - } - } /** The selector of a match, which depends of the given `checkMode`. * @param sel the original selector @@ -1149,7 +1074,7 @@ object desugar { * If the original pattern variable carries a type annotation, so does the corresponding * ValDef or DefDef. */ - def makePatDef(original: Tree, mods: Modifiers, pat: Tree, rhs: Tree)(using Context): Tree = pat match { + def makePatDef(original: Tree, mods: Modifiers, pat: Tree, rhs: Tree)(using Context): Tree = pat match case IdPattern(id, tpt) => val id1 = if id.name == nme.WILDCARD @@ -1167,15 +1092,13 @@ object desugar { |please bind to an identifier and use an alias given.""", bind) false - def isTuplePattern(arity: Int): Boolean = pat match { + def isTuplePattern(arity: Int): Boolean = pat match case Tuple(pats) if pats.size == arity => pats.forall(isVarPattern) case _ => false - } - val isMatchingTuple: Tree => Boolean = { + val isMatchingTuple: Tree => Boolean = case Tuple(es) => isTuplePattern(es.length) case _ => false - } // We can only optimize `val pat = if (...) 
e1 else e2` if: // - `e1` and `e2` are both tuples of arity N @@ -1206,7 +1129,7 @@ object desugar { else val caseDef = CaseDef(pat, EmptyTree, makeTuple(ids)) Match(makeSelector(rhs, MatchCheck.IrrefutablePatDef), caseDef :: Nil) - vars match { + vars match case Nil if !mods.is(Lazy) => matchExpr case (named, tpt) :: Nil => @@ -1236,30 +1159,25 @@ object desugar { .withSpan(named.span) ) flatTree(firstDef :: restDefs) - } - } /** Expand variable identifier x to x @ _ */ - def patternVar(tree: Tree)(using Context): Bind = { + def patternVar(tree: Tree)(using Context): Bind = val Ident(name) = unsplice(tree): @unchecked Bind(name, Ident(nme.WILDCARD)).withSpan(tree.span) - } /** The type of tests that check whether a MemberDef is OK for some flag. * The test succeeds if the partial function is defined and returns true. */ type MemberDefTest = PartialFunction[MemberDef, Boolean] - val legalOpaque: MemberDefTest = { + val legalOpaque: MemberDefTest = case TypeDef(_, rhs) => - def rhsOK(tree: Tree): Boolean = tree match { + def rhsOK(tree: Tree): Boolean = tree match case bounds: TypeBoundsTree => !bounds.alias.isEmpty case _: Template | _: MatchTypeTree => false case LambdaTypeTree(_, body) => rhsOK(body) case _ => true - } rhsOK(rhs) - } def checkOpaqueAlias(tree: MemberDef)(using Context): MemberDef = def check(rhs: Tree): MemberDef = rhs match @@ -1276,23 +1194,22 @@ object desugar { /** Check that modifiers are legal for the definition `tree`. * Right now, we only check for `opaque`. TODO: Move other modifier checks here. 
*/ - def checkModifiers(tree: Tree)(using Context): Tree = tree match { + def checkModifiers(tree: Tree)(using Context): Tree = tree match case tree: MemberDef => var tested: MemberDef = tree def checkApplicable(flag: Flag, test: MemberDefTest): MemberDef = - if (tested.mods.is(flag) && !test.applyOrElse(tree, (md: MemberDef) => false)) { + if (tested.mods.is(flag) && !test.applyOrElse(tree, (md: MemberDef) => false)) report.error(ModifierNotAllowedForDefinition(flag), tree.srcPos) - tested.withMods(tested.mods.withoutFlags(flag)) - } else tested + tested.withMods(tested.mods.withoutFlags(flag)) + else tested tested = checkOpaqueAlias(tested) tested = checkApplicable(Opaque, legalOpaque) tested case _ => tree - } def defTree(tree: Tree)(using Context): Tree = - checkModifiers(tree) match { + checkModifiers(tree) match case tree: ValDef => valDef(tree) case tree: TypeDef => if (tree.isClassDef) classDef(tree) @@ -1303,30 +1220,27 @@ object desugar { else defDef(tree) case tree: ModuleDef => moduleDef(tree) case tree: PatDef => patDef(tree) - } /** { stats; } * ==> * { stats; () } */ - def block(tree: Block)(using Context): Block = tree.expr match { + def block(tree: Block)(using Context): Block = tree.expr match case EmptyTree => cpy.Block(tree)(tree.stats, unitLiteral.withSpan(if (tree.stats.isEmpty) tree.span else tree.span.endPos)) case _ => tree - } /** Translate infix operation expression * * l op r ==> l.op(r) if op is left-associative * ==> r.op(l) if op is right-associative */ - def binop(left: Tree, op: Ident, right: Tree)(using Context): Apply = { - def assignToNamedArg(arg: Tree) = arg match { + def binop(left: Tree, op: Ident, right: Tree)(using Context): Apply = + def assignToNamedArg(arg: Tree) = arg match case Assign(Ident(name), rhs) => cpy.NamedArg(arg)(name, rhs) case _ => arg - } def makeOp(fn: Tree, arg: Tree, selectPos: Span) = val sel = Select(fn, op.name).withSpan(selectPos) if (left.sourcePos.endLine < op.sourcePos.startLine) @@ -1345,7 
+1259,6 @@ object desugar { makeOp(right, left, Span(op.span.start, right.span.end)) else makeOp(left, right, Span(left.span.start, op.span.end, op.span.start)) - } /** Translate throws type `A throws E1 | ... | En` to * $throws[... $throws[A, E1] ... , En]. @@ -1365,7 +1278,7 @@ object desugar { * (t) ==> t * (t1, ..., tN) ==> TupleN(t1, ..., tN) */ - def smallTuple(tree: Tuple)(using Context): Tree = { + def smallTuple(tree: Tuple)(using Context): Tree = val ts = tree.trees val arity = ts.length assert(arity <= Definitions.MaxTupleArity) @@ -1374,7 +1287,6 @@ object desugar { if (ctx.mode is Mode.Type) TypeTree(defn.UnitType) else unitLiteral else if (ctx.mode is Mode.Type) AppliedTypeTree(ref(tupleTypeRef), ts) else Apply(ref(tupleTypeRef.classSymbol.companionModule.termRef), ts) - } private def isTopLevelDef(stat: Tree)(using Context): Boolean = stat match case _: ValDef | _: PatDef | _: DefDef | _: Export | _: ExtMethods => true @@ -1403,29 +1315,25 @@ object desugar { * - "companion objects" of wrapped type definitions * (i.e. 
objects having the same name as a wrapped type) */ - def packageDef(pdef: PackageDef)(using Context): PackageDef = { + def packageDef(pdef: PackageDef)(using Context): PackageDef = checkPackageName(pdef) - val wrappedTypeNames = pdef.stats.collect { + val wrappedTypeNames = pdef.stats.collect: case stat: TypeDef if isTopLevelDef(stat) => stat.name - } def inPackageObject(stat: Tree) = - isTopLevelDef(stat) || { + isTopLevelDef(stat) `||`: stat match case stat: ModuleDef => wrappedTypeNames.contains(stat.name.stripModuleClassSuffix.toTypeName) case _ => false - } val (nestedStats, topStats) = pdef.stats.partition(inPackageObject) if (nestedStats.isEmpty) pdef - else { + else val name = packageObjectName(ctx.source) val grouped = ModuleDef(name, Template(emptyConstructor, Nil, Nil, EmptyValDef, nestedStats)) .withMods(Modifiers(Synthetic)) cpy.PackageDef(pdef)(pdef.pid, topStats :+ grouped) - } - } /** Make closure corresponding to function. * params => body @@ -1450,11 +1358,10 @@ object desugar { * * (x$1, ..., x$n) => (x$0, ..., x${n-1} @unchecked?) match { cases } */ - def makeCaseLambda(cases: List[CaseDef], checkMode: MatchCheck, nparams: Int = 1)(using Context): Function = { + def makeCaseLambda(cases: List[CaseDef], checkMode: MatchCheck, nparams: Int = 1)(using Context): Function = val params = (1 to nparams).toList.map(makeSyntheticParameter(_)) val selector = makeTuple(params.map(p => Ident(p.name))) Function(params, Match(makeSelector(selector, checkMode), cases)) - } /** Map n-ary function `(x1: T1, ..., xn: Tn) => body` where n != 1 to unary function as follows: * @@ -1477,27 +1384,25 @@ object desugar { * If some of the Ti's are absent, omit the : (T1, ..., Tn) type ascription * in the selector. 
*/ - def makeTupledFunction(params: List[ValDef], body: Tree, isGenericTuple: Boolean)(using Context): Tree = { + def makeTupledFunction(params: List[ValDef], body: Tree, isGenericTuple: Boolean)(using Context): Tree = val param = makeSyntheticParameter( tpt = if params.exists(_.tpt.isEmpty) then TypeTree() else Tuple(params.map(_.tpt)), - flags = + flags = if params.nonEmpty && params.head.mods.is(Given) then SyntheticTermParam | Given else SyntheticTermParam) def selector(n: Int) = if (isGenericTuple) Apply(Select(refOfDef(param), nme.apply), Literal(Constant(n))) else Select(refOfDef(param), nme.selectorName(n)) val vdefs = - params.zipWithIndex.map { + params.zipWithIndex.map: case (param, idx) => ValDef(param.name, param.tpt, selector(idx)) .withSpan(param.span) .withAttachment(UntupledParam, ()) .withFlags(Synthetic) - } Function(param :: Nil, Block(vdefs, body)) - } /** Convert a tuple pattern with given `elems` to a sequence of `ValDefs`, * skipping elements that are not convertible. 
@@ -1509,23 +1414,20 @@ object desugar { case Typed(elem1, tpt1) => toParam(elem1, tpt1) case Ident(id: TermName) => ValDef(id, tpt, EmptyTree).withFlags(Param) case _ => EmptyTree - elems.map(param => toParam(param, TypeTree()).withSpan(param.span)).collect { + elems.map(param => toParam(param, TypeTree()).withSpan(param.span)).collect: case vd: ValDef => vd - } - def makeContextualFunction(formals: List[Tree], body: Tree, erasedParams: List[Boolean])(using Context): Function = { + def makeContextualFunction(formals: List[Tree], body: Tree, erasedParams: List[Boolean])(using Context): Function = val mods = Given val params = makeImplicitParameters(formals, mods) FunctionWithMods(params, body, Modifiers(mods), erasedParams) - } - private def derivedValDef(original: Tree, named: NameTree, tpt: Tree, rhs: Tree, mods: Modifiers)(using Context) = { + private def derivedValDef(original: Tree, named: NameTree, tpt: Tree, rhs: Tree, mods: Modifiers)(using Context) = val vdef = ValDef(named.name.asTermName, tpt, rhs) .withMods(mods) .withSpan(original.span.withPoint(named.span.start)) val mayNeedSetter = valDef(vdef) mayNeedSetter - } private def derivedDefDef(original: Tree, named: NameTree, tpt: Tree, rhs: Tree, mods: Modifiers)(implicit src: SourceFile) = DefDef(named.name.asTermName, Nil, tpt, rhs) @@ -1533,7 +1435,7 @@ object desugar { .withSpan(original.span.withPoint(named.span.start)) /** Main desugaring method */ - def apply(tree: Tree, pt: Type = NoType)(using Context): Tree = { + def apply(tree: Tree, pt: Type = NoType)(using Context): Tree = /** Create tree for for-comprehension `` or * `` where mapName and flatMapName are chosen @@ -1583,14 +1485,14 @@ object desugar { * @param enums The enumerators in the for expression * @param body The body of the for expression */ - def makeFor(mapName: TermName, flatMapName: TermName, enums: List[Tree], body: Tree): Tree = trace(i"make for ${ForYield(enums, body)}", show = true) { + def makeFor(mapName: TermName, 
flatMapName: TermName, enums: List[Tree], body: Tree): Tree = trace(i"make for ${ForYield(enums, body)}", show = true): /** Let `pat` be `gen`'s pattern. Make a function value `pat => body`. * If `pat` is a var pattern `id: T` then this gives `(id: T) => body`. * Otherwise this gives `{ case pat => body }`, where `pat` is checked to be * irrefutable if `gen`'s checkMode is GenCheckMode.Check. */ - def makeLambda(gen: GenFrom, body: Tree): Tree = gen.pat match { + def makeLambda(gen: GenFrom, body: Tree): Tree = gen.pat match case IdPattern(named, tpt) if gen.checkMode != GenCheckMode.FilterAlways => Function(derivedValDef(gen.pat, named, tpt, EmptyTree, Modifiers(Param)) :: Nil, body) case _ => @@ -1598,14 +1500,13 @@ object desugar { if (gen.checkMode == GenCheckMode.Check || gen.checkMode == GenCheckMode.CheckAndFilter) MatchCheck.IrrefutableGenFrom else MatchCheck.None makeCaseLambda(CaseDef(gen.pat, EmptyTree, body) :: Nil, matchCheckMode) - } /** If `pat` is not an Identifier, a Typed(Ident, _), or a Bind, wrap * it in a Bind with a fresh name. Return the transformed pattern, and the identifier * that refers to the bound variable for the pattern. Wildcard Binds are * also replaced by Binds with fresh names. 
*/ - def makeIdPat(pat: Tree): (Tree, Ident) = pat match { + def makeIdPat(pat: Tree): (Tree, Ident) = pat match case bind @ Bind(name, pat1) => if name == nme.WILDCARD then val name = UniqueName.fresh() @@ -1616,7 +1517,6 @@ object desugar { case _ => val name = UniqueName.fresh() (Bind(name, pat), Ident(name)) - } /** Make a pattern filter: * rhs.withFilter { case pat => true case _ => false } @@ -1644,19 +1544,18 @@ object desugar { * if (f eq alwaysTrue) this // or rather identity filter monadic applied to this * else real withFilter */ - def makePatFilter(rhs: Tree, pat: Tree): Tree = { + def makePatFilter(rhs: Tree, pat: Tree): Tree = val cases = List( CaseDef(pat, EmptyTree, Literal(Constant(true))), CaseDef(Ident(nme.WILDCARD), EmptyTree, Literal(Constant(false)))) Apply(Select(rhs, nme.withFilter), makeCaseLambda(cases, MatchCheck.None)) - } /** Is pattern `pat` irrefutable when matched against `rhs`? * We only can do a simple syntactic check here; a more refined check * is done later in the pattern matcher (see discussion in @makePatFilter). */ - def isIrrefutable(pat: Tree, rhs: Tree): Boolean = { - def matchesTuple(pats: List[Tree], rhs: Tree): Boolean = rhs match { + def isIrrefutable(pat: Tree, rhs: Tree): Boolean = + def matchesTuple(pats: List[Tree], rhs: Tree): Boolean = rhs match case Tuple(trees) => (pats corresponds trees)(isIrrefutable) case Parens(rhs1) => matchesTuple(pats, rhs1) case Block(_, rhs1) => matchesTuple(pats, rhs1) @@ -1665,14 +1564,11 @@ object desugar { case CaseDef(_, _, rhs1) => matchesTuple(pats, rhs1) case Throw(_) => true case _ => false - } - pat match { + pat match case Bind(_, pat1) => isIrrefutable(pat1, rhs) case Parens(pat1) => isIrrefutable(pat1, rhs) case Tuple(pats) => matchesTuple(pats, rhs) case _ => isVarPattern(pat) - } - } /** Is `pat` of the form `x`, `x T`, or `given T`? when used as the lhs of a generator, * these are all considered irrefutable. 
@@ -1691,12 +1587,11 @@ object desugar { /** rhs.name with a pattern filter on rhs unless `pat` is irrefutable when * matched against `rhs`. */ - def rhsSelect(gen: GenFrom, name: TermName) = { + def rhsSelect(gen: GenFrom, name: TermName) = val rhs = if (needsNoFilter(gen)) gen.expr else makePatFilter(gen.expr, gen.pat) Select(rhs, name) - } - enums match { + enums match case (gen: GenFrom) :: Nil => Apply(rhsSelect(gen, mapName), makeLambda(gen, body)) case (gen: GenFrom) :: (rest @ (GenFrom(_, _, _) :: _)) => @@ -1724,10 +1619,8 @@ object desugar { makeFor(mapName, flatMapName, genFrom :: rest, body) case _ => EmptyTree //may happen for erroneous input - } - } - def makePolyFunction(targs: List[Tree], body: Tree, pt: Type): Tree = body match { + def makePolyFunction(targs: List[Tree], body: Tree, pt: Type): Tree = body match case Parens(body1) => makePolyFunction(targs, body1, pt) case Block(Nil, body1) => @@ -1735,25 +1628,22 @@ object desugar { case Function(vargs, res) => assert(targs.nonEmpty) // TODO: Figure out if we need a `PolyFunctionWithMods` instead. 
- val mods = body match { + val mods = body match case body: FunctionWithMods => body.mods case _ => untpd.EmptyModifiers - } val polyFunctionTpt = ref(defn.PolyFunctionType) val applyTParams = targs.asInstanceOf[List[TypeDef]] - if (ctx.mode.is(Mode.Type)) { + if (ctx.mode.is(Mode.Type)) // Desugar [T_1, ..., T_M] -> (P_1, ..., P_N) => R // Into scala.PolyFunction { def apply[T_1, ..., T_M](x$1: P_1, ..., x$N: P_N): R } - val applyVParams = vargs.zipWithIndex.map { + val applyVParams = vargs.zipWithIndex.map: case (p: ValDef, _) => p.withAddedFlags(mods.flags) case (p, n) => makeSyntheticParameter(n + 1, p).withAddedFlags(mods.flags.toTermFlags) - } RefinedTypeTree(polyFunctionTpt, List( DefDef(nme.apply, applyTParams :: applyVParams :: Nil, res, EmptyTree).withFlags(Synthetic) )) - } - else { + else // Desugar [T_1, ..., T_M] -> (x_1: P_1, ..., x_N: P_N) => body // with pt [S_1, ..., S_M] -> (O_1, ..., O_N) => R // Into new scala.PolyFunction { def apply[T_1, ..., T_M](x_1: P_1, ..., x_N: P_N): R2 = body } @@ -1776,25 +1666,22 @@ object desugar { New(Template(emptyConstructor, List(polyFunctionTpt), Nil, EmptyValDef, List(DefDef(nme.apply, applyTParams :: applyVParams :: Nil, typeTree(pt), res)) )) - } case _ => // may happen for erroneous input. An error will already have been reported. assert(ctx.reporter.errorsReported) EmptyTree - } // begin desugar // Special case for `Parens` desugaring: unlike all the desugarings below, // its output is not a new tree but an existing one whose position should // be preserved, so we shouldn't call `withPos` on it. 
- tree match { + tree match case Parens(t) => return t case _ => - } - val desugared = tree match { + val desugared = tree match case PolyFunction(targs, body) => makePolyFunction(targs, body, pt) orElse tree case SymbolLit(str) => @@ -1802,18 +1689,16 @@ object desugar { ref(defn.ScalaSymbolClass.companionModule.termRef), Literal(Constant(str)) :: Nil) case InterpolatedString(id, segments) => - val strs = segments map { + val strs = segments map: case ts: Thicket => ts.trees.head case t => t - } val elems = segments flatMap { case ts: Thicket => ts.trees.tail case t => Nil - } map { + } map: case Block(Nil, EmptyTree) => Literal(Constant(())) // for s"... ${} ..." case Block(Nil, expr) => expr // important for interpolated string as patterns, see i1773.scala case t => t - } // This is a deliberate departure from scalac, where StringContext is not rooted (See #4732) Apply(Select(Apply(scalaDot(nme.StringContext), strs), id).withSpan(tree.span), elems) case PostfixOp(t, op) => @@ -1840,9 +1725,7 @@ object desugar { case ext: ExtMethods => Block(List(ext), Literal(Constant(())).withSpan(ext.span)) case f: FunctionWithMods if f.hasErasedParams => makeFunctionWithValDefs(f, pt) - } desugared.withSpan(tree.span) - } /** Turn a fucntion value `handlerFun` into a catch case for a try. * If `handlerFun` is a partial function, translate to @@ -1898,14 +1781,13 @@ object desugar { * The result of this method is used for validity checking, is thrown away afterwards. 
* @param parent The type of `parent` */ - def refinedTypeToClass(parent: tpd.Tree, refinements: List[Tree])(using Context): TypeDef = { - def stripToCore(tp: Type): List[Type] = tp match { + def refinedTypeToClass(parent: tpd.Tree, refinements: List[Tree])(using Context): TypeDef = + def stripToCore(tp: Type): List[Type] = tp match case tp: AppliedType => tp :: Nil case tp: TypeRef if tp.symbol.isClass => tp :: Nil // monomorphic class type case tp: TypeProxy => stripToCore(tp.underlying) case AndType(tp1, tp2) => stripToCore(tp1) ::: stripToCore(tp2) case _ => defn.AnyType :: Nil - } val parentCores = stripToCore(parent.tpe) val untpdParent = TypedSplice(parent) val (classParents, self) = @@ -1913,7 +1795,6 @@ object desugar { else (parentCores map TypeTree, ValDef(nme.WILDCARD, untpdParent, EmptyTree)) val impl = Template(emptyConstructor, classParents, Nil, self, refinements) TypeDef(tpnme.REFINE_CLASS, impl).withFlags(Trait) - } /** Ensure the given function tree use only ValDefs for parameters. 
* For example, @@ -1921,31 +1802,28 @@ object desugar { * gets converted to * FunctionWithMods(List(ValDef(x$1, A), ValDef(x$2, B)), body, mods, erasedParams) */ - def makeFunctionWithValDefs(tree: Function, pt: Type)(using Context): Function = { + def makeFunctionWithValDefs(tree: Function, pt: Type)(using Context): Function = val Function(args, result) = tree - args match { + args match case (_ : ValDef) :: _ => tree // ValDef case can be easily handled case _ if !ctx.mode.is(Mode.Type) => tree case _ => - val applyVParams = args.zipWithIndex.map { + val applyVParams = args.zipWithIndex.map: case (p, n) => makeSyntheticParameter(n + 1, p) - } tree match case tree: FunctionWithMods => untpd.FunctionWithMods(applyVParams, result, tree.mods, tree.erasedParams) case _ => untpd.Function(applyVParams, result) - } - } /** Returns list of all pattern variables, possibly with their types, * without duplicates */ - private def getVariables(tree: Tree, shouldAddGiven: Context ?=> Bind => Boolean)(using Context): List[VarInfo] = { + private def getVariables(tree: Tree, shouldAddGiven: Context ?=> Bind => Boolean)(using Context): List[VarInfo] = val buf = ListBuffer[VarInfo]() def seenName(name: Name) = buf exists (_._1.name == name) def add(named: NameTree, t: Tree): Unit = if (!seenName(named.name) && named.name.isTermName) buf += ((named, t)) - def collect(tree: Tree): Unit = tree match { + def collect(tree: Tree): Unit = tree match case tree @ Bind(nme.WILDCARD, tree1) => if tree.mods.is(Given) then val Typed(_, tpt) = tree1: @unchecked @@ -1993,14 +1871,10 @@ object desugar { collect(expr) case Quote(body, _) => new UntypedTreeTraverser { - def traverse(tree: untpd.Tree)(using Context): Unit = tree match { + def traverse(tree: untpd.Tree)(using Context): Unit = tree match case SplicePattern(body, _) => collect(body) case _ => traverseChildren(tree) - } }.traverse(body) case _ => - } collect(tree) buf.toList - } -} diff --git 
a/compiler/src/dotty/tools/dotc/ast/DesugarEnums.scala b/compiler/src/dotty/tools/dotc/ast/DesugarEnums.scala index a1c3c0ed0775..18f9b4b4ac55 100644 --- a/compiler/src/dotty/tools/dotc/ast/DesugarEnums.scala +++ b/compiler/src/dotty/tools/dotc/ast/DesugarEnums.scala @@ -13,7 +13,7 @@ import transform.SyntheticMembers.ExtendsSingletonMirror import scala.annotation.internal.sharable /** Helper methods to desugar enums */ -object DesugarEnums { +object DesugarEnums: import untpd._ enum CaseKind: @@ -40,22 +40,19 @@ object DesugarEnums { * whether the case is still in the enum class or it has been transferred to the * companion object. */ - def enumClass(using Context): Symbol = { + def enumClass(using Context): Symbol = val cls = ctx.owner if (cls.is(Module)) cls.linkedClass else cls - } - def enumCompanion(using Context): Symbol = { + def enumCompanion(using Context): Symbol = val cls = ctx.owner if (cls.is(Module)) cls.sourceModule else cls.linkedClass.sourceModule - } /** Is `tree` an (untyped) enum case? */ - def isEnumCase(tree: Tree)(using Context): Boolean = tree match { + def isEnumCase(tree: Tree)(using Context): Boolean = tree match case tree: MemberDef => tree.mods.isEnumCase case PatDef(mods, _, _, _) => mods.isEnumCase case _ => false - } /** A reference to the enum class `E`, possibly followed by type arguments. * Each covariant type parameter is approximated by its lower bound. @@ -63,7 +60,7 @@ object DesugarEnums { * It is an error if a type parameter is non-variant, or if its approximation * refers to pther type parameters. 
*/ - def interpolatedEnumParent(span: Span)(using Context): Tree = { + def interpolatedEnumParent(span: Span)(using Context): Tree = val tparams = enumClass.typeParams def isGround(tp: Type) = tp.subst(tparams, tparams.map(_ => NoType)) eq tp val targs = tparams map { tparam => @@ -71,16 +68,14 @@ object DesugarEnums { tparam.info.bounds.lo else if (tparam.is(Contravariant) && isGround(tparam.info.bounds.hi)) tparam.info.bounds.hi - else { + else def problem = if (!tparam.isOneOf(VarianceFlags)) "is invariant" else "has bounds that depend on a type parameter in the same parameter list" errorType(em"""cannot determine type argument for enum parent $enumClass, |type parameter $tparam $problem""", ctx.source.atSpan(span)) - } } TypeTree(enumClass.typeRef.appliedTo(targs)).withSpan(span) - } /** A type tree referring to `enumClass` */ def enumClassRef(using Context): Tree = @@ -112,7 +107,7 @@ object DesugarEnums { * case _ => throw new IllegalArgumentException("case not found: " + $name) * } */ - private def enumScaffolding(enumValues: List[RefTree])(using Context): List[Tree] = { + private def enumScaffolding(enumValues: List[RefTree])(using Context): List[Tree] = val rawEnumClassRef = rawRef(enumClass.typeRef) extension (tpe: NamedType) def ofRawEnum = AppliedTypeTree(ref(tpe), rawEnumClassRef) @@ -140,7 +135,6 @@ object DesugarEnums { privateValuesDef :: valuesDef :: valueOfDef :: Nil - } private def enumLookupMethods(constraints: EnumConstraints)(using Context): List[Tree] = def scaffolding: List[Tree] = @@ -177,7 +171,7 @@ object DesugarEnums { * def ordinal = _$ordinal // if `E` does not derive from `java.lang.Enum` * } */ - private def enumValueCreator(using Context) = { + private def enumValueCreator(using Context) = val creator = New(Template( constr = emptyConstructor, parents = enumClassRef :: scalaRuntimeDot(tpnme.EnumValue) :: Nil, @@ -188,7 +182,6 @@ object DesugarEnums { DefDef(nme.DOLLAR_NEW, List(List(param(nme.ordinalDollar_, defn.IntType), 
param(nme.nameDollar, defn.StringType))), TypeTree(), creator).withFlags(Private | Synthetic) - } /** Is a type parameter in `enumTypeParams` referenced from an enum class case that has * given type parameters `caseTypeParams`, value parameters `vparamss` and parents `parents`? @@ -201,18 +194,17 @@ object DesugarEnums { enumTypeParams: List[TypeSymbol], caseTypeParams: List[TypeDef], vparamss: List[List[ValDef]], - parents: List[Tree])(using Context): Boolean = { + parents: List[Tree])(using Context): Boolean = - object searchRef extends UntypedTreeAccumulator[Boolean] { + object searchRef extends UntypedTreeAccumulator[Boolean]: var tparamNames = enumTypeParams.map(_.name).toSet[Name] - def underBinders(binders: List[MemberDef], op: => Boolean): Boolean = { + def underBinders(binders: List[MemberDef], op: => Boolean): Boolean = val saved = tparamNames tparamNames = tparamNames -- binders.map(_.name) try op finally tparamNames = saved - } - def apply(x: Boolean, tree: Tree)(using Context): Boolean = x || { - tree match { + def apply(x: Boolean, tree: Tree)(using Context): Boolean = x `||`: + tree match case Ident(name) => val matches = tparamNames.contains(name) if (matches && (caseTypeParams.nonEmpty || vparamss.isEmpty)) @@ -224,31 +216,26 @@ object DesugarEnums { val refinementDefs = refinements collect { case r: MemberDef => r } underBinders(refinementDefs, foldOver(x, tree)) case _ => foldOver(x, tree) - } - } def apply(tree: Tree)(using Context): Boolean = underBinders(caseTypeParams, apply(false, tree)) - } def typeHasRef(tpt: Tree) = searchRef(tpt) def valDefHasRef(vd: ValDef) = typeHasRef(vd.tpt) - def parentHasRef(parent: Tree): Boolean = parent match { + def parentHasRef(parent: Tree): Boolean = parent match case Apply(fn, _) => parentHasRef(fn) case TypeApply(_, targs) => targs.exists(typeHasRef) case Select(nu, nme.CONSTRUCTOR) => parentHasRef(nu) case New(tpt) => typeHasRef(tpt) case parent => parent.isType && typeHasRef(parent) - } 
vparamss.nestedExists(valDefHasRef) || parents.exists(parentHasRef) - } /** A pair consisting of * - the next enum tag * - scaffolding containing the necessary definitions for singleton enum cases * unless that scaffolding was already generated by a previous call to `nextEnumKind`. */ - def nextOrdinal(name: Name, kind: CaseKind, definesLookups: Boolean)(using Context): (Int, List[Tree]) = { + def nextOrdinal(name: Name, kind: CaseKind, definesLookups: Boolean)(using Context): (Int, List[Tree]) = val (ordinal, seenMinKind, seenMaxKind, seenCases) = ctx.tree.removeAttachment(EnumCaseCount).getOrElse((0, CaseKind.Class, CaseKind.Simple, Nil)) val minKind = if kind.ordinal < seenMinKind.ordinal then kind else seenMinKind @@ -263,7 +250,6 @@ object DesugarEnums { else ctx.tree.pushAttachment(EnumCaseCount, (ordinal + 1, minKind, maxKind, cases)) (ordinal, Nil) - } def param(name: TermName, typ: Type)(using Context): ValDef = param(name, TypeTree(typ)) def param(name: TermName, tpt: Tree)(using Context): ValDef = ValDef(name, tpt, EmptyTree).withFlags(Param) @@ -279,32 +265,27 @@ object DesugarEnums { rawRef(enumClass.typeRef), body(Ident(nme.ordinal))).withFlags(Synthetic) /** Expand a module definition representing a parameterless enum case */ - def expandEnumModule(name: TermName, impl: Template, mods: Modifiers, definesLookups: Boolean, span: Span)(using Context): Tree = { + def expandEnumModule(name: TermName, impl: Template, mods: Modifiers, definesLookups: Boolean, span: Span)(using Context): Tree = assert(impl.body.isEmpty) if (!enumClass.exists) EmptyTree else if (impl.parents.isEmpty) expandSimpleEnumCase(name, mods, definesLookups, span) - else { + else val (tag, scaffolding) = nextOrdinal(name, CaseKind.Object, definesLookups) val impl1 = cpy.Template(impl)(parents = impl.parents :+ scalaRuntimeDot(tpnme.EnumValue), body = Nil) .withAttachment(ExtendsSingletonMirror, ()) val vdef = ValDef(name, TypeTree(), New(impl1)).withMods(mods.withAddedFlags(EnumValue, 
span)) flatTree(vdef :: scaffolding).withSpan(span) - } - } /** Expand a simple enum case */ def expandSimpleEnumCase(name: TermName, mods: Modifiers, definesLookups: Boolean, span: Span)(using Context): Tree = if (!enumClass.exists) EmptyTree - else if (enumClass.typeParams.nonEmpty) { + else if (enumClass.typeParams.nonEmpty) val parent = interpolatedEnumParent(span) val impl = Template(emptyConstructor, parent :: Nil, Nil, EmptyValDef, Nil) expandEnumModule(name, impl, mods, definesLookups, span) - } - else { + else val (tag, scaffolding) = nextOrdinal(name, CaseKind.Simple, definesLookups) val creator = Apply(Ident(nme.DOLLAR_NEW), List(Literal(Constant(tag)), Literal(Constant(name.toString)))) val vdef = ValDef(name, enumClassRef, creator).withMods(mods.withAddedFlags(EnumValue, span)) flatTree(vdef :: scaffolding).withSpan(span) - } -} diff --git a/compiler/src/dotty/tools/dotc/ast/MainProxies.scala b/compiler/src/dotty/tools/dotc/ast/MainProxies.scala index c0cf2c0d1b81..f5199dc8ef74 100644 --- a/compiler/src/dotty/tools/dotc/ast/MainProxies.scala +++ b/compiler/src/dotty/tools/dotc/ast/MainProxies.scala @@ -10,12 +10,11 @@ import Comments.Comment import NameKinds.DefaultGetterName import Annotations.Annotation -object MainProxies { +object MainProxies: /** Generate proxy classes for @main functions and @myMain functions where myMain <:< MainAnnotation */ - def proxies(stats: List[tpd.Tree])(using Context): List[untpd.Tree] = { + def proxies(stats: List[tpd.Tree])(using Context): List[untpd.Tree] = mainAnnotationProxies(stats) ++ mainProxies(stats) - } /** Generate proxy classes for @main functions. 
* A function like @@ -35,32 +34,29 @@ object MainProxies { * catch case err: ParseError => showError(err) * } */ - private def mainProxies(stats: List[tpd.Tree])(using Context): List[untpd.Tree] = { + private def mainProxies(stats: List[tpd.Tree])(using Context): List[untpd.Tree] = import tpd._ - def mainMethods(stats: List[Tree]): List[Symbol] = stats.flatMap { + def mainMethods(stats: List[Tree]): List[Symbol] = stats.flatMap: case stat: DefDef if stat.symbol.hasAnnotation(defn.MainAnnot) => stat.symbol :: Nil case stat @ TypeDef(name, impl: Template) if stat.symbol.is(Module) => mainMethods(impl.body) case _ => Nil - } mainMethods(stats).flatMap(mainProxy) - } import untpd._ - private def mainProxy(mainFun: Symbol)(using Context): List[TypeDef] = { + private def mainProxy(mainFun: Symbol)(using Context): List[TypeDef] = val mainAnnotSpan = mainFun.getAnnotation(defn.MainAnnot).get.tree.span def pos = mainFun.sourcePos val argsRef = Ident(nme.args) def addArgs(call: untpd.Tree, mt: MethodType, idx: Int): untpd.Tree = - if (mt.isImplicitMethod) { + if (mt.isImplicitMethod) report.error(em"@main method cannot have implicit parameters", pos) call - } - else { - val args = mt.paramInfos.zipWithIndex map { + else + val args = mt.paramInfos.zipWithIndex map: (formal, n) => val (parserSym, formalElem) = if (formal.isRepeatedParam) (defn.CLP_parseRemainingArguments, formal.argTypes.head) @@ -69,24 +65,21 @@ object MainProxies { TypeApply(ref(parserSym.termRef), TypeTree(formalElem) :: Nil), argsRef :: Literal(Constant(idx + n)) :: Nil) if (formal.isRepeatedParam) repeated(arg) else arg - } val call1 = Apply(call, args) - mt.resType match { + mt.resType match case restpe: MethodType => if (mt.paramInfos.lastOption.getOrElse(NoType).isRepeatedParam) report.error(em"varargs parameter of @main method must come last", pos) addArgs(call1, restpe, idx + args.length) case _ => call1 - } - } var result: List[TypeDef] = Nil if (!mainFun.owner.isStaticOwner) report.error(em"@main 
method is not statically accessible", pos) - else { + else var call = ref(mainFun.termRef) - mainFun.info match { + mainFun.info match case _: ExprType => case mt: MethodType => call = addArgs(call, mt, 0) @@ -94,7 +87,6 @@ object MainProxies { report.error(em"@main method cannot have type parameters", pos) case _ => report.error(em"@main can only annotate a method", pos) - } val errVar = Ident(nme.error) val handler = CaseDef( Typed(errVar, TypeTree(defn.CLP_ParseError.typeRef)), @@ -106,11 +98,10 @@ object MainProxies { /** Replace typed `Ident`s that have been typed with a TypeSplice with the reference to the symbol. * The annotations will be retype-checked in another scope that may not have the same imports. */ - def insertTypeSplices = new TreeMap { + def insertTypeSplices = new TreeMap: override def transform(tree: Tree)(using Context): Tree = tree match case tree: tpd.Ident @unchecked => TypedSplice(tree) case tree => super.transform(tree) - } val annots = mainFun.annotations .filterNot(_.matches(defn.MainAnnot)) .map(annot => insertTypeSplices.transform(annot.tree)) @@ -123,9 +114,7 @@ object MainProxies { if (!ctx.reporter.hasErrors) result = mainCls.withSpan(mainAnnotSpan.toSynthetic) :: Nil - } result - } private type DefaultValueSymbols = Map[Int, Symbol] private type ParameterAnnotationss = Seq[Seq[Annotation]] @@ -171,7 +160,7 @@ object MainProxies { * } * } */ - private def mainAnnotationProxies(stats: List[tpd.Tree])(using Context): List[untpd.Tree] = { + private def mainAnnotationProxies(stats: List[tpd.Tree])(using Context): List[untpd.Tree] = import tpd._ /** @@ -179,7 +168,7 @@ object MainProxies { * point of the compilation, they must be explicitly passed by [[mainProxy]]. 
*/ def defaultValueSymbols(scope: Tree, funSymbol: Symbol): DefaultValueSymbols = - scope match { + scope match case TypeDef(_, template: Template) => template.body.flatMap((_: Tree) match { case dd: DefDef if dd.name.is(DefaultGetterName) && dd.name.firstPart == funSymbol.name => @@ -188,13 +177,12 @@ object MainProxies { case _ => Nil }).toMap case _ => Map.empty - } /** Computes the list of main methods present in the code. */ - def mainMethods(scope: Tree, stats: List[Tree]): List[(Symbol, ParameterAnnotationss, DefaultValueSymbols, Option[Comment])] = stats.flatMap { + def mainMethods(scope: Tree, stats: List[Tree]): List[(Symbol, ParameterAnnotationss, DefaultValueSymbols, Option[Comment])] = stats.flatMap: case stat: DefDef => val sym = stat.symbol - sym.annotations.filter(_.matches(defn.MainAnnotationClass)) match { + sym.annotations.filter(_.matches(defn.MainAnnotationClass)) match case Nil => Nil case _ :: Nil => @@ -205,18 +193,15 @@ object MainProxies { case mainAnnot :: others => report.error(em"method cannot have multiple main annotations", mainAnnot.tree) Nil - } case stat @ TypeDef(_, impl: Template) if stat.symbol.is(Module) => mainMethods(stat, impl.body) case _ => Nil - } // Assuming that the top-level object was already generated, all main methods will have a scope mainMethods(EmptyTree, stats).flatMap(mainAnnotationProxy) - } - private def mainAnnotationProxy(mainFun: Symbol, paramAnnotations: ParameterAnnotationss, defaultValueSymbols: DefaultValueSymbols, docComment: Option[Comment])(using Context): Option[TypeDef] = { + private def mainAnnotationProxy(mainFun: Symbol, paramAnnotations: ParameterAnnotationss, defaultValueSymbols: DefaultValueSymbols, docComment: Option[Comment])(using Context): Option[TypeDef] = val mainAnnot = mainFun.getAnnotation(defn.MainAnnotationClass).get def pos = mainFun.sourcePos @@ -299,21 +284,18 @@ object MainProxies { /** Turns an annotation (e.g. `@main(40)`) into an instance of the class (e.g. 
`new scala.main(40)`). */ def instantiateAnnotation(annot: Annotation): Tree = - val argss = { - def recurse(t: tpd.Tree, acc: List[List[Tree]]): List[List[Tree]] = t match { + val argss = + def recurse(t: tpd.Tree, acc: List[List[Tree]]): List[List[Tree]] = t match case Apply(t, args: List[tpd.Tree]) => recurse(t, extractArgs(args) :: acc) case _ => acc - } def extractArgs(args: List[tpd.Tree]): List[Tree] = - args.flatMap { + args.flatMap: case Typed(SeqLiteral(varargs, _), _) => varargs.map(arg => TypedSplice(arg)) case arg: Select if arg.name.is(DefaultGetterName) => Nil // Ignore default values, they will be added later by the compiler case arg => List(TypedSplice(arg)) - } recurse(annot.tree, Nil) - } New(TypeTree(annot.symbol.typeRef), argss) end instantiateAnnotation @@ -361,11 +343,10 @@ object MainProxies { /** Replace typed `Ident`s that have been typed with a TypeSplice with the reference to the symbol. * The annotations will be retype-checked in another scope that may not have the same imports. */ - def insertTypeSplices = new TreeMap { + def insertTypeSplices = new TreeMap: override def transform(tree: Tree)(using Context): Tree = tree match case tree: tpd.Ident @unchecked => TypedSplice(tree) case tree => super.transform(tree) - } val annots = mainFun.annotations .filterNot(_.matches(defn.MainAnnotationClass)) .map(annot => insertTypeSplices.transform(annot.tree)) @@ -381,7 +362,7 @@ object MainProxies { if (!mainFun.owner.isStaticOwner) report.error(em"main method is not statically accessible", pos) None - else mainFun.info match { + else mainFun.info match case _: ExprType => Some(generateMainClass(unitToValue(ref(mainFun.termRef)), Nil, Nil)) case mt: MethodType => @@ -400,8 +381,6 @@ object MainProxies { case _ => report.error(em"main can only annotate a method", pos) None - } - } /** A class responsible for extracting the docstrings of a method. 
*/ private class Documentation(docComment: Option[Comment]): @@ -415,21 +394,19 @@ object MainProxies { private var _mainDoc: String = "" private var _argDocs: Map[String, String] = Map() - docComment match { + docComment match case Some(comment) => if comment.isDocComment then parseDocComment(comment.raw) else _mainDoc = comment.raw case None => - } private def cleanComment(raw: String): String = var lines: Seq[String] = raw.trim.nn.split('\n').nn.toSeq lines = lines.map(l => l.substring(skipLineLead(l, -1), l.length).nn.trim.nn) - var s = lines.foldLeft("") { + var s = lines.foldLeft(""): case ("", s2) => s2 case (s1, "") if s1.last == '\n' => s1 // Multiple newlines are kept as single newlines case (s1, "") => s1 + '\n' case (s1, s2) if s1.last == '\n' => s1 + s2 case (s1, s2) => s1 + ' ' + s2 - } s.replaceAll(raw"\[\[", "").nn.replaceAll(raw"\]\]", "").nn.trim.nn private def parseDocComment(raw: String): Unit = @@ -446,4 +423,3 @@ object MainProxies { val argsCommentsTexts = argsCommentsTextSpans.mapValues({ case (beg, end) => raw.substring(beg, end).nn }) _argDocs = argsCommentsTexts.mapValues(cleanComment(_)).toMap end Documentation -} diff --git a/compiler/src/dotty/tools/dotc/ast/NavigateAST.scala b/compiler/src/dotty/tools/dotc/ast/NavigateAST.scala index ace396d1e583..32e75c3fc95a 100644 --- a/compiler/src/dotty/tools/dotc/ast/NavigateAST.scala +++ b/compiler/src/dotty/tools/dotc/ast/NavigateAST.scala @@ -11,13 +11,13 @@ import dotty.tools.dotc.core.Types.Type /** Utility functions to go from typed to untyped ASTs */ // TODO: Handle trees with mixed source files -object NavigateAST { +object NavigateAST: /** The untyped tree corresponding to typed tree `tree` in the compilation * unit specified by `ctx` */ def toUntyped(tree: tpd.Tree)(using Context): untpd.Tree = - untypedPath(tree, exactMatch = true) match { + untypedPath(tree, exactMatch = true) match case (utree: untpd.Tree) :: _ => utree case _ => @@ -26,7 +26,6 @@ object NavigateAST { Error(i"""no 
untyped tree for $tree, pos = ${tree.sourcePos} |best matching path =\n$loosePath%\n====\n% |path positions = ${loosePath.map(_.sourcePos)}""") - } /** The reverse path of untyped trees starting with a tree that closest matches * `tree` and ending in the untyped tree at the root of the compilation unit @@ -41,19 +40,16 @@ object NavigateAST { * envelope of the definition, and declare success if we find another DefTree. */ def untypedPath(tree: tpd.Tree, exactMatch: Boolean = false)(using Context): List[Positioned] = - tree match { + tree match case tree: MemberDef[?] => - untypedPath(tree.span) match { + untypedPath(tree.span) match case path @ (last: DefTree[?]) :: _ => path case path if !exactMatch => path case _ => Nil - } case _ => - untypedPath(tree.span) match { + untypedPath(tree.span) match case (path @ last :: _) if last.span == tree.span || !exactMatch => path case _ => Nil - } - } /** The reverse part of the untyped root of the compilation unit of `ctx` to * the given `span`. @@ -71,38 +67,34 @@ object NavigateAST { * end point are the same, so this is useful when trying to reconcile * nodes with source code. */ - def pathTo(span: Span, from: List[Positioned], skipZeroExtent: Boolean = false)(using Context): List[Positioned] = { - def childPath(it: Iterator[Any], path: List[Positioned]): List[Positioned] = { + def pathTo(span: Span, from: List[Positioned], skipZeroExtent: Boolean = false)(using Context): List[Positioned] = + def childPath(it: Iterator[Any], path: List[Positioned]): List[Positioned] = var bestFit: List[Positioned] = path - while (it.hasNext) { - val path1 = it.next() match { + while (it.hasNext) + val path1 = it.next() match case p: Positioned => singlePath(p, path) case m: untpd.Modifiers => childPath(m.productIterator, path) case xs: List[?] 
=> childPath(xs.iterator, path) case _ => path - } if ((path1 ne path) && ((bestFit eq path) || bestFit.head.span != path1.head.span && bestFit.head.span.contains(path1.head.span))) bestFit = path1 - } bestFit - } /* * Annotations trees are located in the Type */ def unpackAnnotations(t: Type, path: List[Positioned]): List[Positioned] = - t match { + t match case ann: AnnotatedType => unpackAnnotations(ann.parent, childPath(ann.annot.tree.productIterator, path)) case imp: ImportType => childPath(imp.expr.productIterator, path) case other => path - } def singlePath(p: Positioned, path: List[Positioned]): List[Positioned] = - if (p.span.exists && !(skipZeroExtent && p.span.isZeroExtent) && p.span.contains(span)) { + if (p.span.exists && !(skipZeroExtent && p.span.isZeroExtent) && p.span.contains(span)) // FIXME: We shouldn't be manually forcing trees here, we should replace // our usage of `productIterator` by something in `Positioned` that takes // care of low-level details like this for us. 
@@ -115,13 +107,8 @@ object NavigateAST { case _ => p.productIterator childPath(iterator, p :: path) - } - else { - p match { + else + p match case t: untpd.TypeTree => unpackAnnotations(t.typeOpt, path) case _ => path - } - } childPath(from.iterator, Nil) - } -} diff --git a/compiler/src/dotty/tools/dotc/ast/Positioned.scala b/compiler/src/dotty/tools/dotc/ast/Positioned.scala index dd783be7a9e1..b5ee0771b201 100644 --- a/compiler/src/dotty/tools/dotc/ast/Positioned.scala +++ b/compiler/src/dotty/tools/dotc/ast/Positioned.scala @@ -15,7 +15,7 @@ import annotation.internal.sharable /** A base class for things that have positions (currently: modifiers and trees) */ -abstract class Positioned(implicit @constructorOnly src: SourceFile) extends SrcPos, Product, Cloneable { +abstract class Positioned(implicit @constructorOnly src: SourceFile) extends SrcPos, Product, Cloneable: import Positioned.{ids, nextId, debugId} private var mySpan: Span = _ @@ -63,7 +63,7 @@ abstract class Positioned(implicit @constructorOnly src: SourceFile) extends Src */ def withSpan(span: Span): this.type = if (span == mySpan) this - else { + else val newpd: this.type = if !mySpan.exists then if span.exists then envelope(source, span.startPos) // fill in children spans @@ -72,7 +72,6 @@ abstract class Positioned(implicit @constructorOnly src: SourceFile) extends Src cloneIn(source) newpd.span = span newpd - } /** The union of startSpan and the spans of all positioned children that * have the same source as this node, except that Inlined nodes only @@ -83,19 +82,18 @@ abstract class Positioned(implicit @constructorOnly src: SourceFile) extends Src * the left, or, if that one does not exist, to the start position of the envelope * of all children to the right. 
*/ - def envelope(src: SourceFile, startSpan: Span = NoSpan): Span = (this: @unchecked) match { + def envelope(src: SourceFile, startSpan: Span = NoSpan): Span = (this: @unchecked) match case Trees.Inlined(call, _, _) => call.span case _ => - def include(span: Span, x: Any): Span = x match { + def include(span: Span, x: Any): Span = x match case p: Positioned => if (p.source != src) span else if (p.span.exists) span.union(p.span) - else if (span.exists) { + else if (span.exists) if (span.end != MaxOffset) p.span = p.envelope(src, span.endPos) span - } else // No span available to assign yet, signal this by returning a span with MaxOffset end Span(MaxOffset, MaxOffset) case m: untpd.Modifiers => @@ -103,7 +101,6 @@ abstract class Positioned(implicit @constructorOnly src: SourceFile) extends Src case y :: ys => include(include(span, y), ys) case _ => span - } val limit = productArity def includeChildren(span: Span, n: Int): Span = if (n < limit) includeChildren(include(span, productElement(n): @unchecked), n + 1) @@ -121,18 +118,16 @@ abstract class Positioned(implicit @constructorOnly src: SourceFile) extends Src // Go through it again with the known start position. includeChildren(span1.startPos, 0) span2.toSynthetic - } /** Clone this node but assign it a fresh id which marks it as a node in `file`. 
*/ - def cloneIn(src: SourceFile): this.type = { + def cloneIn(src: SourceFile): this.type = val newpd: this.type = clone.asInstanceOf[this.type] newpd.allocateId() newpd.mySource = src newpd - } - def contains(that: Positioned): Boolean = { - def isParent(x: Any): Boolean = x match { + def contains(that: Positioned): Boolean = + def isParent(x: Any): Boolean = x match case x: Positioned => x.contains(that) case m: untpd.Modifiers => @@ -141,18 +136,14 @@ abstract class Positioned(implicit @constructorOnly src: SourceFile) extends Src xs.exists(isParent) case _ => false - } (this eq that) || - (this.span contains that.span) && { + (this.span contains that.span) `&&`: var n = productArity var found = false - while (!found && n > 0) { + while (!found && n > 0) n -= 1 found = isParent(productElement(n)) - } found - } - } private class LastPosRef: var positioned: Positioned | Null = null @@ -162,10 +153,10 @@ abstract class Positioned(implicit @constructorOnly src: SourceFile) extends Src * - Parent spans contain child spans * - If item is a non-empty tree, it has a position */ - def checkPos(nonOverlapping: Boolean)(using Context): Unit = try { + def checkPos(nonOverlapping: Boolean)(using Context): Unit = try import untpd._ val last = LastPosRef() - def check(p: Any): Unit = p match { + def check(p: Any): Unit = p match case p: Positioned => assert(span contains p.span, i"""position error, parent span does not contain child span @@ -173,14 +164,13 @@ abstract class Positioned(implicit @constructorOnly src: SourceFile) extends Src |parent span = $span, |child = $p # ${p.uniqueId}, |child span = ${p.span}""".stripMargin) - p match { + p match case tree: Tree if !tree.isEmpty => assert(tree.span.exists, s"position error: position not set for $tree # ${tree.uniqueId}") case _ => - } if nonOverlapping then - this match { + this match case _: XMLBlock => // FIXME: Trees generated by the XML parser do not satisfy `checkPos` case _: WildcardFunction @@ -194,7 +184,6 @@ 
abstract class Positioned(implicit @constructorOnly src: SourceFile) extends Src |1st child span = ${last.span} |2nd child = $p |2nd child span = ${p.span}""".stripMargin) - } last.positioned = p last.span = p.span p.checkPos(nonOverlapping) @@ -204,8 +193,7 @@ abstract class Positioned(implicit @constructorOnly src: SourceFile) extends Src case xs: List[?] => xs.foreach(check) case _ => - } - this match { + this match case tree: DefDef if tree.name == nme.CONSTRUCTOR && tree.mods.is(JavaDefined) => // Special treatment for constructors coming from Java: // Leave out leading type params, they are copied with wrong positions from parent class @@ -222,20 +210,15 @@ abstract class Positioned(implicit @constructorOnly src: SourceFile) extends Src case _ => val end = productArity var n = 0 - while (n < end) { + while (n < end) check(productElement(n)) n += 1 - } - } - } - catch { + catch case ex: AssertionError => println(i"error while checking $this") throw ex - } -} -object Positioned { +object Positioned: @sharable private var debugId = Int.MinValue @sharable private var ids: java.util.WeakHashMap[Positioned, Int] | Null = null @sharable private var nextId: Int = 0 @@ -246,4 +229,3 @@ object Positioned { || debugId != ctx.settings.YdebugTreeWithId.default then ids = java.util.WeakHashMap() -} diff --git a/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala b/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala index 2d335d1ed380..66176d62519d 100644 --- a/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala +++ b/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala @@ -18,47 +18,38 @@ trait TreeInfo[T <: Untyped] { self: Trees.Instance[T] => def unsplice(tree: Trees.Tree[T]): Trees.Tree[T] = tree - def isDeclarationOrTypeDef(tree: Tree): Boolean = unsplice(tree) match { + def isDeclarationOrTypeDef(tree: Tree): Boolean = unsplice(tree) match case DefDef(_, _, _, EmptyTree) | ValDef(_, _, EmptyTree) | TypeDef(_, _) => true case _ => false - } - def isOpAssign(tree: Tree): Boolean = 
unsplice(tree) match { + def isOpAssign(tree: Tree): Boolean = unsplice(tree) match case Apply(fn, _ :: _) => - unsplice(fn) match { + unsplice(fn) match case Select(_, name) if name.isOpAssignmentName => true case _ => false - } case _ => false - } - class MatchingArgs(params: List[Symbol], args: List[Tree])(using Context) { - def foreach(f: (Symbol, Tree) => Unit): Boolean = { - def recur(params: List[Symbol], args: List[Tree]): Boolean = params match { + class MatchingArgs(params: List[Symbol], args: List[Tree])(using Context): + def foreach(f: (Symbol, Tree) => Unit): Boolean = + def recur(params: List[Symbol], args: List[Tree]): Boolean = params match case Nil => args.isEmpty case param :: params1 => - if (param.info.isRepeatedParam) { + if (param.info.isRepeatedParam) for (arg <- args) f(param, arg) true - } - else args match { + else args match case Nil => false case arg :: args1 => f(param, args.head) recur(params1, args1) - } - } recur(params, args) - } def zipped: List[(Symbol, Tree)] = map((_, _)) - def map[R](f: (Symbol, Tree) => R): List[R] = { + def map[R](f: (Symbol, Tree) => R): List[R] = val b = List.newBuilder[R] foreach(b += f(_, _)) b.result() - } - } /** The method part of an application node, possibly enclosed in a block * with only valdefs as statements. the reason for also considering blocks @@ -70,37 +61,32 @@ trait TreeInfo[T <: Untyped] { self: Trees.Instance[T] => * (x$2, x$1) * } */ - def methPart(tree: Tree): Tree = stripApply(tree) match { + def methPart(tree: Tree): Tree = stripApply(tree) match case TypeApply(fn, _) => methPart(fn) case AppliedTypeTree(fn, _) => methPart(fn) // !!! should not be needed case Block(stats, expr) => methPart(expr) case mp => mp - } /** If this is an application, its function part, stripping all * Apply nodes (but leaving TypeApply nodes in). Otherwise the tree itself. 
*/ - def stripApply(tree: Tree): Tree = unsplice(tree) match { + def stripApply(tree: Tree): Tree = unsplice(tree) match case Apply(fn, _) => stripApply(fn) case _ => tree - } /** If this is a block, its expression part */ - def stripBlock(tree: Tree): Tree = unsplice(tree) match { + def stripBlock(tree: Tree): Tree = unsplice(tree) match case Block(_, expr) => stripBlock(expr) case Inlined(_, _, expr) => stripBlock(expr) case _ => tree - } - def stripInlined(tree: Tree): Tree = unsplice(tree) match { + def stripInlined(tree: Tree): Tree = unsplice(tree) match case Inlined(_, _, expr) => stripInlined(expr) case _ => tree - } - def stripAnnotated(tree: Tree): Tree = tree match { + def stripAnnotated(tree: Tree): Tree = tree match case Annotated(arg, _) => arg case _ => tree - } def stripTyped(tree: Tree): Tree = unsplice(tree) match case Typed(expr, _) => @@ -109,12 +95,11 @@ trait TreeInfo[T <: Untyped] { self: Trees.Instance[T] => tree /** The number of arguments in an application */ - def numArgs(tree: Tree): Int = unsplice(tree) match { + def numArgs(tree: Tree): Int = unsplice(tree) match case Apply(fn, args) => numArgs(fn) + args.length case TypeApply(fn, _) => numArgs(fn) case Block(_, expr) => numArgs(expr) case _ => 0 - } /** The type arguments of a possibly curried call */ def typeArgss(tree: Tree): List[List[Tree]] = @@ -135,12 +120,11 @@ trait TreeInfo[T <: Untyped] { self: Trees.Instance[T] => loop(tree, Nil) /** All term arguments of an application in a single flattened list */ - def allArguments(tree: Tree): List[Tree] = unsplice(tree) match { + def allArguments(tree: Tree): List[Tree] = unsplice(tree) match case Apply(fn, args) => allArguments(fn) ::: args case TypeApply(fn, _) => allArguments(fn) case Block(_, expr) => allArguments(expr) case _ => Nil - } /** Is tree explicitly parameterized with type arguments? 
*/ def hasExplicitTypeArgs(tree: Tree): Boolean = tree match @@ -149,72 +133,63 @@ trait TreeInfo[T <: Untyped] { self: Trees.Instance[T] => case _ => false /** Is tree a path? */ - def isPath(tree: Tree): Boolean = unsplice(tree) match { + def isPath(tree: Tree): Boolean = unsplice(tree) match case Ident(_) | This(_) | Super(_, _) => true case Select(qual, _) => isPath(qual) case _ => false - } /** Is tree a self constructor call this(...)? I.e. a call to a constructor of the * same object? */ - def isSelfConstrCall(tree: Tree): Boolean = methPart(tree) match { + def isSelfConstrCall(tree: Tree): Boolean = methPart(tree) match case Ident(nme.CONSTRUCTOR) | Select(This(_), nme.CONSTRUCTOR) => true case _ => false - } /** Is tree a super constructor call? */ - def isSuperConstrCall(tree: Tree): Boolean = methPart(tree) match { + def isSuperConstrCall(tree: Tree): Boolean = methPart(tree) match case Select(Super(_, _), nme.CONSTRUCTOR) => true case _ => false - } - def isSuperSelection(tree: Tree): Boolean = unsplice(tree) match { + def isSuperSelection(tree: Tree): Boolean = unsplice(tree) match case Select(Super(_, _), _) => true case _ => false - } - def isSelfOrSuperConstrCall(tree: Tree): Boolean = methPart(tree) match { + def isSelfOrSuperConstrCall(tree: Tree): Boolean = methPart(tree) match case Ident(nme.CONSTRUCTOR) | Select(This(_), nme.CONSTRUCTOR) | Select(Super(_, _), nme.CONSTRUCTOR) => true case _ => false - } /** Is tree a backquoted identifier or definition */ def isBackquoted(tree: Tree): Boolean = tree.hasAttachment(Backquoted) /** Is tree a variable pattern? 
*/ - def isVarPattern(pat: Tree): Boolean = unsplice(pat) match { + def isVarPattern(pat: Tree): Boolean = unsplice(pat) match case x: Ident => x.name.isVarPattern && !isBackquoted(x) case _ => false - } /** The first constructor definition in `stats` */ - def firstConstructor(stats: List[Tree]): Tree = stats match { + def firstConstructor(stats: List[Tree]): Tree = stats match case (meth: DefDef) :: _ if meth.name.isConstructorName => meth case stat :: stats => firstConstructor(stats) case nil => EmptyTree - } /** Is tpt a vararg type of the form T* or => T*? */ - def isRepeatedParamType(tpt: Tree)(using Context): Boolean = stripByNameType(tpt) match { + def isRepeatedParamType(tpt: Tree)(using Context): Boolean = stripByNameType(tpt) match case tpt: TypeTree => tpt.typeOpt.isRepeatedParam case AppliedTypeTree(Select(_, tpnme.REPEATED_PARAM_CLASS), _) => true case _ => false - } /** Is this argument node of the form *, or is it a reference to * such an argument ? The latter case can happen when an argument is lifted. */ - def isWildcardStarArg(tree: Tree)(using Context): Boolean = unbind(tree) match { + def isWildcardStarArg(tree: Tree)(using Context): Boolean = unbind(tree) match case Typed(Ident(nme.WILDCARD_STAR), _) => true case Typed(_, Ident(tpnme.WILDCARD_STAR)) => true case Typed(_, tpt: TypeTree) => tpt.typeOpt.isRepeatedParam case NamedArg(_, arg) => isWildcardStarArg(arg) case arg => arg.typeOpt.widen.isRepeatedParam - } /** Is tree a type tree of the form `=> T` or (under pureFunctions) `{refs}-> T`? */ def isByNameType(tree: Tree)(using Context): Boolean = @@ -235,20 +210,18 @@ trait TreeInfo[T <: Untyped] { self: Trees.Instance[T] => /** Is the argument a wildcard argument of the form `_` or `x @ _`? */ - def isWildcardArg(tree: Tree): Boolean = unbind(tree) match { + def isWildcardArg(tree: Tree): Boolean = unbind(tree) match case Ident(nme.WILDCARD) => true case _ => false - } /** Does this list contain a named argument tree? 
*/ def hasNamedArg(args: List[Any]): Boolean = args exists isNamedArg val isNamedArg: Any => Boolean = (arg: Any) => arg.isInstanceOf[Trees.NamedArg[_]] /** Is this pattern node a catch-all (wildcard or variable) pattern? */ - def isDefaultCase(cdef: CaseDef): Boolean = cdef match { + def isDefaultCase(cdef: CaseDef): Boolean = cdef match case CaseDef(pat, EmptyTree, _) => isWildcardArg(pat) case _ => false - } /** Does this CaseDef catch Throwable? */ def catchesThrowable(cdef: CaseDef)(using Context): Boolean = @@ -257,12 +230,10 @@ trait TreeInfo[T <: Untyped] { self: Trees.Instance[T] => /** Does this CaseDef catch everything of a certain Type? */ def catchesAllOf(cdef: CaseDef, threshold: Type)(using Context): Boolean = isDefaultCase(cdef) || - cdef.guard.isEmpty && { - unbind(cdef.pat) match { + cdef.guard.isEmpty `&&`: + unbind(cdef.pat) match case Typed(Ident(nme.WILDCARD), tpt) => threshold <:< tpt.typeOpt case _ => false - } - } /** Is this case guarded? */ def isGuardedCase(cdef: CaseDef): Boolean = cdef.guard ne EmptyTree @@ -306,86 +277,75 @@ trait TreeInfo[T <: Untyped] { self: Trees.Instance[T] => case _ => None /** The underlying pattern ignoring any bindings */ - def unbind(x: Tree): Tree = unsplice(x) match { + def unbind(x: Tree): Tree = unsplice(x) match case Bind(_, y) => unbind(y) case y => y - } /** The largest subset of {NoInits, PureInterface} that a * trait or class with these parents can have as flags. */ - def parentsKind(parents: List[Tree])(using Context): FlagSet = parents match { + def parentsKind(parents: List[Tree])(using Context): FlagSet = parents match case Nil => NoInitsInterface case Apply(_, _ :: _) :: _ | Block(_, _) :: _ => EmptyFlags case _ :: parents1 => parentsKind(parents1) - } /** Checks whether predicate `p` is true for all result parts of this expression, * where we zoom into Ifs, Matches, and Blocks. 
*/ - def forallResults(tree: Tree, p: Tree => Boolean): Boolean = tree match { + def forallResults(tree: Tree, p: Tree => Boolean): Boolean = tree match case If(_, thenp, elsep) => forallResults(thenp, p) && forallResults(elsep, p) case Match(_, cases) => cases forall (c => forallResults(c.body, p)) case Block(_, expr) => forallResults(expr, p) case _ => p(tree) - } - def appliedCore(tree: Tree): Tree = tree match { + def appliedCore(tree: Tree): Tree = tree match case Apply(fn, _) => appliedCore(fn) case TypeApply(fn, _) => appliedCore(fn) case AppliedTypeTree(fn, _) => appliedCore(fn) case tree => tree - } /** Is tree an application with result `this.type`? * Accept `b.addOne(x)` and also `xs(i) += x` * where the op is an assignment operator. */ - def isThisTypeResult(tree: Tree)(using Context): Boolean = appliedCore(tree) match { + def isThisTypeResult(tree: Tree)(using Context): Boolean = appliedCore(tree) match case fun @ Select(receiver, op) => val argss = termArgss(tree) - tree.tpe match { + tree.tpe match case ThisType(tref) => tref.symbol == receiver.symbol case tref: TermRef => tref.symbol == receiver.symbol || argss.exists(_.exists(tref.symbol == _.symbol)) case _ => def checkSingle(sym: Symbol): Boolean = - (sym == receiver.symbol) || { - receiver match { + (sym == receiver.symbol) `||`: + receiver match case Apply(_, _) => op.isOpAssignmentName // xs(i) += x case _ => receiver.symbol != NoSymbol && (receiver.symbol.isGetter || receiver.symbol.isField) // xs.addOne(x) for var xs - } - } - @tailrec def loop(mt: Type): Boolean = mt match { + @tailrec def loop(mt: Type): Boolean = mt match case m: MethodType => - m.resType match { + m.resType match case ThisType(tref) => checkSingle(tref.symbol) case tref: TermRef => checkSingle(tref.symbol) case restpe => loop(restpe) - } case PolyType(_, restpe) => loop(restpe) case _ => false - } fun.symbol != NoSymbol && loop(fun.symbol.info) - } case _ => tree.tpe.isInstanceOf[ThisType] - } } trait UntypedTreeInfo 
extends TreeInfo[Untyped] { self: Trees.Instance[Untyped] => import untpd._ /** The underlying tree when stripping any TypedSplice or Parens nodes */ - override def unsplice(tree: Tree): Tree = tree match { + override def unsplice(tree: Tree): Tree = tree match case TypedSplice(tree1) => tree1 case Parens(tree1) => unsplice(tree1) case _ => tree - } - def functionWithUnknownParamType(tree: Tree): Option[Tree] = tree match { + def functionWithUnknownParamType(tree: Tree): Option[Tree] = tree match case Function(args, _) => if (args.exists { case ValDef(_, tpt, _) => tpt.isEmpty @@ -400,7 +360,6 @@ trait UntypedTreeInfo extends TreeInfo[Untyped] { self: Trees.Instance[Untyped] functionWithUnknownParamType(expr) case _ => None - } def isFunctionWithUnknownParamType(tree: Tree): Boolean = functionWithUnknownParamType(tree).isDefined @@ -411,7 +370,7 @@ trait UntypedTreeInfo extends TreeInfo[Untyped] { self: Trees.Instance[Untyped] case _ => false /** Is `tree` an context function or closure, possibly nested in a block? */ - def isContextualClosure(tree: Tree)(using Context): Boolean = unsplice(tree) match { + def isContextualClosure(tree: Tree)(using Context): Boolean = unsplice(tree) match case tree: FunctionWithMods => tree.mods.is(Given) case Function((param: untpd.ValDef) :: _, _) => param.mods.is(Given) case Closure(_, meth, _) => true @@ -422,12 +381,11 @@ trait UntypedTreeInfo extends TreeInfo[Untyped] { self: Trees.Instance[Untyped] else isUsingClause(params) case _ => false - } /** The largest subset of {NoInits, PureInterface} that a * trait or class enclosing this statement can have as flags. 
*/ - private def defKind(tree: Tree)(using Context): FlagSet = unsplice(tree) match { + private def defKind(tree: Tree)(using Context): FlagSet = unsplice(tree) match case EmptyTree | _: Import => NoInitsInterface case tree: TypeDef => if (tree.isClassDef) NoInits else NoInitsInterface case tree: DefDef => @@ -444,7 +402,6 @@ trait UntypedTreeInfo extends TreeInfo[Untyped] { self: Trees.Instance[Untyped] NoInits case tree: ValDef => if (tree.unforcedRhs == EmptyTree) NoInitsInterface else EmptyFlags case _ => EmptyFlags - } /** The largest subset of {NoInits, PureInterface} that a * trait or class with this body can have as flags. @@ -456,13 +413,11 @@ trait UntypedTreeInfo extends TreeInfo[Untyped] { self: Trees.Instance[Untyped] type VarInfo = (NameTree, Tree) /** An extractor for trees of the form `id` or `id: T` */ - object IdPattern { - def unapply(tree: Tree)(using Context): Option[VarInfo] = tree match { + object IdPattern: + def unapply(tree: Tree)(using Context): Option[VarInfo] = tree match case id: Ident if id.name != nme.WILDCARD => Some(id, TypeTree()) case Typed(id: Ident, tpt) => Some((id, tpt)) case _ => None - } - } /** Under pureFunctions: A builder and extractor for `=> T`, which is an alias for `->{cap} T`. * Only trees of the form `=> T` are matched; trees written directly as `->{cap} T` @@ -492,7 +447,7 @@ trait TypedTreeInfo extends TreeInfo[Type] { self: Trees.Instance[Type] => * Idempotent if running the statement a second time has no side effects * Impure otherwise */ - def statPurity(tree: Tree)(using Context): PurityLevel = unsplice(tree) match { + def statPurity(tree: Tree)(using Context): PurityLevel = unsplice(tree) match case EmptyTree | TypeDef(_, _) | Import(_, _) @@ -504,7 +459,6 @@ trait TypedTreeInfo extends TreeInfo[Type] { self: Trees.Instance[Type] => Impure // TODO: It seem like this should be exprPurity(tree) // But if we do that the repl/vars test break. Need to figure out why that's the case. 
- } /** The purity level of this expression. See docs for PurityLevel for what that means * @@ -514,7 +468,7 @@ trait TypedTreeInfo extends TreeInfo[Type] { self: Trees.Instance[Type] => * takes a different code path than all to follow; but they are idempotent * because running the expression a second time gives the cached result. */ - def exprPurity(tree: Tree)(using Context): PurityLevel = unsplice(tree) match { + def exprPurity(tree: Tree)(using Context): PurityLevel = unsplice(tree) match case EmptyTree | This(_) | Super(_, _) @@ -548,22 +502,19 @@ trait TypedTreeInfo extends TreeInfo[Type] { self: Trees.Instance[Type] => exprPurity(expr) case _ => Impure - } private def minOf(l0: PurityLevel, ls: List[PurityLevel]) = ls.foldLeft(l0)(_ `min` _) - def isPurePath(tree: Tree)(using Context): Boolean = tree.tpe match { + def isPurePath(tree: Tree)(using Context): Boolean = tree.tpe match case tpe: ConstantType => exprPurity(tree) >= Pure case _ => exprPurity(tree) == PurePath - } def isPureExpr(tree: Tree)(using Context): Boolean = exprPurity(tree) >= Pure - def isIdempotentPath(tree: Tree)(using Context): Boolean = tree.tpe match { + def isIdempotentPath(tree: Tree)(using Context): Boolean = tree.tpe match case tpe: ConstantType => exprPurity(tree) >= Idempotent case _ => exprPurity(tree) >= IdempotentPath - } def isIdempotentExpr(tree: Tree)(using Context): Boolean = exprPurity(tree) >= Idempotent @@ -591,7 +542,7 @@ trait TypedTreeInfo extends TreeInfo[Type] { self: Trees.Instance[Type] => * @DarkDimius: need to make sure that lazy accessor methods have Lazy and Stable * flags set. 
*/ - def refPurity(tree: Tree)(using Context): PurityLevel = { + def refPurity(tree: Tree)(using Context): PurityLevel = val sym = tree.symbol if (!tree.hasType) Impure else if !tree.tpe.widen.isParameterless then PurePath @@ -603,7 +554,6 @@ trait TypedTreeInfo extends TreeInfo[Type] { self: Trees.Instance[Type] => else if (sym.is(Lazy)) IdempotentPath else if sym.isAllOf(InlineParam) then Impure else PurePath - } def isPureRef(tree: Tree)(using Context): Boolean = refPurity(tree) == PurePath @@ -660,10 +610,10 @@ trait TypedTreeInfo extends TreeInfo[Type] { self: Trees.Instance[Type] => * * This avoids the situation where we have a Select node that does not have a symbol. */ - def constToLiteral(tree: Tree)(using Context): Tree = { + def constToLiteral(tree: Tree)(using Context): Tree = assert(!tree.isType) val tree1 = ConstFold(tree) - tree1.tpe.widenTermRefExpr.dealias.normalized match { + tree1.tpe.widenTermRefExpr.dealias.normalized match case ConstantType(Constant(_: Type)) if tree.isInstanceOf[Block] => // We can't rewrite `{ class A; classOf[A] }` to `classOf[A]`, so we leave // blocks returning a class literal alone, even if they're idempotent. @@ -692,8 +642,6 @@ trait TypedTreeInfo extends TreeInfo[Type] { self: Trees.Instance[Type] => // Simplify `pre.op` to `{ pre; v }` where `v` is the value of `pre.op` Block(pre :: Nil, Literal(value)).withSpan(tree.span) case _ => tree1 - } - } def isExtMethodApply(tree: Tree)(using Context): Boolean = methPart(tree) match case Inlined(call, _, _) => isExtMethodApply(call) @@ -702,55 +650,47 @@ trait TypedTreeInfo extends TreeInfo[Type] { self: Trees.Instance[Type] => /** Is symbol potentially a getter of a mutable variable? 
*/ - def mayBeVarGetter(sym: Symbol)(using Context): Boolean = { - def maybeGetterType(tpe: Type): Boolean = tpe match { + def mayBeVarGetter(sym: Symbol)(using Context): Boolean = + def maybeGetterType(tpe: Type): Boolean = tpe match case _: ExprType => true case tpe: MethodType => tpe.isImplicitMethod case tpe: PolyType => maybeGetterType(tpe.resultType) case _ => false - } sym.owner.isClass && !sym.isStableMember && maybeGetterType(sym.info) - } /** Is tree a reference to a mutable variable, or to a potential getter * that has a setter in the same class? */ - def isVariableOrGetter(tree: Tree)(using Context): Boolean = { + def isVariableOrGetter(tree: Tree)(using Context): Boolean = def sym = tree.symbol def isVar = sym.is(Mutable) def isGetter = mayBeVarGetter(sym) && sym.owner.info.member(sym.name.asTermName.setterName).exists - unsplice(tree) match { + unsplice(tree) match case Ident(_) => isVar case Select(_, _) => isVar || isGetter case Apply(_, _) => - methPart(tree) match { + methPart(tree) match case Select(qual, nme.apply) => qual.tpe.member(nme.update).exists case _ => false - } case _ => false - } - } /** Is tree a `this` node which belongs to `enclClass`? 
*/ - def isSelf(tree: Tree, enclClass: Symbol)(using Context): Boolean = unsplice(tree) match { + def isSelf(tree: Tree, enclClass: Symbol)(using Context): Boolean = unsplice(tree) match case This(_) => tree.symbol == enclClass case _ => false - } /** Strips layers of `.asInstanceOf[T]` / `_.$asInstanceOf[T]()` from an expression */ - def stripCast(tree: Tree)(using Context): Tree = { + def stripCast(tree: Tree)(using Context): Tree = def isCast(sel: Tree) = sel.symbol.isTypeCast - unsplice(tree) match { + unsplice(tree) match case TypeApply(sel @ Select(inner, _), _) if isCast(sel) => stripCast(inner) case Apply(TypeApply(sel @ Select(inner, _), _), Nil) if isCast(sel) => stripCast(inner) case t => t - } - } /** The type and term arguments of a possibly curried call, in the order they are given */ def allArgss(tree: Tree): List[List[Tree]] = @@ -769,71 +709,60 @@ trait TypedTreeInfo extends TreeInfo[Type] { self: Trees.Instance[Type] => /** Decompose a template body into parameters and other statements */ def decomposeTemplateBody(body: List[Tree])(using Context): (List[Tree], List[Tree]) = - body.partition { + body.partition: case stat: TypeDef => stat.symbol is Flags.Param case stat: ValOrDefDef => stat.symbol.is(Flags.ParamAccessor) && !stat.symbol.isSetter case _ => false - } /** An extractor for closures, either contained in a block or standalone. */ - object closure { - def unapply(tree: Tree): Option[(List[Tree], Tree, Tree)] = tree match { + object closure: + def unapply(tree: Tree): Option[(List[Tree], Tree, Tree)] = tree match case Block(_, expr) => unapply(expr) case Closure(env, meth, tpt) => Some(env, meth, tpt) case Typed(expr, _) => unapply(expr) case _ => None - } - } /** An extractor for def of a closure contained the block of the closure. 
*/ - object closureDef { - def unapply(tree: Tree)(using Context): Option[DefDef] = tree match { + object closureDef: + def unapply(tree: Tree)(using Context): Option[DefDef] = tree match case Block((meth : DefDef) :: Nil, closure: Closure) if meth.symbol == closure.meth.symbol => Some(meth) case Block(Nil, expr) => unapply(expr) case _ => None - } - } /** If tree is a closure, its body, otherwise tree itself */ - def closureBody(tree: Tree)(using Context): Tree = tree match { + def closureBody(tree: Tree)(using Context): Tree = tree match case closureDef(meth) => meth.rhs case _ => tree - } /** The variables defined by a pattern, in reverse order of their appearance. */ - def patVars(tree: Tree)(using Context): List[Symbol] = { - val acc = new TreeAccumulator[List[Symbol]] { - def apply(syms: List[Symbol], tree: Tree)(using Context) = tree match { + def patVars(tree: Tree)(using Context): List[Symbol] = + val acc = new TreeAccumulator[List[Symbol]]: + def apply(syms: List[Symbol], tree: Tree)(using Context) = tree match case Bind(_, body) => apply(tree.symbol :: syms, body) case Annotated(tree, id @ Ident(tpnme.BOUNDTYPE_ANNOT)) => apply(id.symbol :: syms, tree) case _ => foldOver(syms, tree) - } - } acc(Nil, tree) - } /** Is this pattern node a catch-all or type-test pattern? 
*/ - def isCatchCase(cdef: CaseDef)(using Context): Boolean = cdef match { + def isCatchCase(cdef: CaseDef)(using Context): Boolean = cdef match case CaseDef(Typed(Ident(nme.WILDCARD), tpt), EmptyTree, _) => isSimpleThrowable(tpt.tpe) case CaseDef(Bind(_, Typed(Ident(nme.WILDCARD), tpt)), EmptyTree, _) => isSimpleThrowable(tpt.tpe) case _ => isDefaultCase(cdef) - } - private def isSimpleThrowable(tp: Type)(using Context): Boolean = tp match { + private def isSimpleThrowable(tp: Type)(using Context): Boolean = tp match case tp @ TypeRef(pre, _) => (pre == NoPrefix || pre.typeSymbol.isStatic) && (tp.symbol derivesFrom defn.ThrowableClass) && !tp.symbol.is(Trait) case _ => false - } /** The symbols defined locally in a statement list */ def localSyms(stats: List[Tree])(using Context): List[Symbol] = @@ -853,32 +782,28 @@ trait TypedTreeInfo extends TreeInfo[Type] { self: Trees.Instance[Type] => * if no such path exists. * Pre: `sym` must have a position. */ - def defPath(sym: Symbol, root: Tree)(using Context): List[Tree] = trace.onDebug(s"defpath($sym with position ${sym.span}, ${root.show})") { + def defPath(sym: Symbol, root: Tree)(using Context): List[Tree] = trace.onDebug(s"defpath($sym with position ${sym.span}, ${root.show})"): require(sym.span.exists, sym) - object accum extends TreeAccumulator[List[Tree]] { + object accum extends TreeAccumulator[List[Tree]]: def apply(x: List[Tree], tree: Tree)(using Context): List[Tree] = if (tree.span.contains(sym.span)) if (definedSym(tree) == sym) tree :: x - else { + else val x1 = foldOver(x, tree) if (x1 ne x) tree :: x1 else x1 - } else x - } accum(Nil, root) - } /** The top level classes in this tree, including only those module classes that * are not a linked class of some other class in the result. 
*/ - def topLevelClasses(tree: Tree)(using Context): List[ClassSymbol] = tree match { + def topLevelClasses(tree: Tree)(using Context): List[ClassSymbol] = tree match case PackageDef(_, stats) => stats.flatMap(topLevelClasses) case tdef: TypeDef if tdef.symbol.isClass => tdef.symbol.asClass :: Nil case _ => Nil - } /** The tree containing only the top-level classes and objects matching either `cls` or its companion object */ - def sliceTopLevel(tree: Tree, cls: ClassSymbol)(using Context): List[Tree] = tree match { + def sliceTopLevel(tree: Tree, cls: ClassSymbol)(using Context): List[Tree] = tree match case PackageDef(pid, stats) => val slicedStats = stats.flatMap(sliceTopLevel(_, cls)) val isEffectivelyEmpty = slicedStats.forall(_.isInstanceOf[Import]) @@ -896,7 +821,6 @@ trait TypedTreeInfo extends TreeInfo[Type] { self: Trees.Instance[Type] => else Nil case tree => tree :: Nil - } /** The statement sequence that contains a definition of `sym`, or Nil * if none was found. @@ -905,24 +829,22 @@ trait TypedTreeInfo extends TreeInfo[Type] { self: Trees.Instance[Type] => */ def definingStats(sym: Symbol)(using Context): List[Tree] = if (!sym.span.exists || (ctx eq NoContext) || (ctx.compilationUnit eq NoCompilationUnit)) Nil - else defPath(sym, ctx.compilationUnit.tpdTree) match { + else defPath(sym, ctx.compilationUnit.tpdTree) match case defn :: encl :: _ => def verify(stats: List[Tree]) = if (stats exists (definedSym(_) == sym)) stats else Nil - encl match { + encl match case Block(stats, _) => verify(stats) case encl: Template => verify(encl.body) case PackageDef(_, stats) => verify(stats) case _ => Nil - } case nil => Nil - } /** If `tree` is an instance of `TupleN[...](e1, ..., eN)`, the arguments `e1, ..., eN` * otherwise the empty list. 
*/ - def tupleArgs(tree: Tree)(using Context): List[Tree] = tree match { + def tupleArgs(tree: Tree)(using Context): List[Tree] = tree match case Block(Nil, expr) => tupleArgs(expr) case Inlined(_, Nil, expr) => tupleArgs(expr) case Apply(fn: NameTree, args) @@ -930,21 +852,19 @@ trait TypedTreeInfo extends TreeInfo[Type] { self: Trees.Instance[Type] => fn.symbol.owner.is(Module) && defn.isTupleClass(fn.symbol.owner.companionClass) => args case _ => Nil - } /** The qualifier part of a Select or Ident. * For an Ident, this is the `This` of the current class. */ - def qualifier(tree: Tree)(using Context): Tree = tree match { + def qualifier(tree: Tree)(using Context): Tree = tree match case Select(qual, _) => qual case tree: Ident => desugarIdentPrefix(tree) case _ => This(ctx.owner.enclosingClass.asClass) - } /** Is this a (potentially applied) selection of a member of a structural type * that is not a member of an underlying class or trait? */ - def isStructuralTermSelectOrApply(tree: Tree)(using Context): Boolean = { + def isStructuralTermSelectOrApply(tree: Tree)(using Context): Boolean = def isStructuralTermSelect(tree: Select) = def hasRefinement(qualtpe: Type): Boolean = qualtpe.dealias match case RefinedType(parent, rname, rinfo) => @@ -973,7 +893,6 @@ trait TypedTreeInfo extends TreeInfo[Type] { self: Trees.Instance[Type] => case _ => false loop(tree) - } /** Return a pair consisting of (supercall, rest) * @@ -982,17 +901,16 @@ trait TypedTreeInfo extends TreeInfo[Type] { self: Trees.Instance[Type] => * The supercall is always the first statement (if it exists) */ final def splitAtSuper(constrStats: List[Tree])(implicit ctx: Context): (List[Tree], List[Tree]) = - constrStats.toList match { + constrStats.toList match case (sc: Apply) :: rest if sc.symbol.isConstructor => (sc :: Nil, rest) case (block @ Block(_, sc: Apply)) :: rest if sc.symbol.isConstructor => (block :: Nil, rest) case stats => (Nil, stats) - } /** Structural tree comparison (since == on 
trees is reference equality). * For the moment, only Ident, Select, Literal, Apply and TypeApply are supported */ - extension (t1: Tree) { - def === (t2: Tree)(using Context): Boolean = (t1, t2) match { + extension (t1: Tree) + def === (t2: Tree)(using Context): Boolean = (t1, t2) match case (t1: Ident, t2: Ident) => t1.symbol == t2.symbol case (t1 @ Select(q1, _), t2 @ Select(q2, _)) => @@ -1005,27 +923,22 @@ trait TypedTreeInfo extends TreeInfo[Type] { self: Trees.Instance[Type] => f1 === f2 && ts1.tpes.corresponds(ts2.tpes)(_ =:= _) case _ => false - } def hash(using Context): Int = - t1.getClass.hashCode * 37 + { - t1 match { + t1.getClass.hashCode * 37 `+`: + t1 match case t1: Ident => t1.symbol.hashCode case t1 @ Select(q1, _) => t1.symbol.hashCode * 41 + q1.hash case Literal(c1) => c1.hashCode case Apply(f1, as1) => as1.foldLeft(f1.hash)((h, arg) => h * 41 + arg.hash) case TypeApply(f1, ts1) => ts1.foldLeft(f1.hash)((h, arg) => h * 41 + arg.tpe.hash) case _ => t1.hashCode - } - } - } def assertAllPositioned(tree: Tree)(using Context): Unit = - tree.foreachSubTree { + tree.foreachSubTree: case t: WithoutTypeOrPos[_] => case t => assert(t.span.exists, i"$t") - } - object QuotedTypeOf { + object QuotedTypeOf: /** Extracts the content of a quoted tree. * The result can be the contents of a term or type quote, which * will return a term or type tree respectively. @@ -1036,17 +949,15 @@ trait TypedTreeInfo extends TreeInfo[Type] { self: Trees.Instance[Type] => val TypeApply(_, body :: _) = tree.fun: @unchecked Some(body) else None - } /** Extractors for type splices */ - object SplicedType { + object SplicedType: /** Extracts the content of a spliced type tree. * The result can be the contents of a type splice, which * will return a type tree. */ def unapply(tree: tpd.Select)(using Context): Option[tpd.Tree] = if tree.symbol.isTypeSplice then Some(tree.qualifier) else None - } /** Extractor for not-null assertions. 
* A not-null assertion for reference `x` has the form `x.$asInstanceOf$[x.type & T]`. @@ -1067,7 +978,7 @@ trait TypedTreeInfo extends TreeInfo[Type] { self: Trees.Instance[Type] => case _ => None end AssertNotNull - object ConstantValue { + object ConstantValue: def unapply(tree: Tree)(using Context): Option[Any] = tree match case Typed(expr, _) => unapply(expr) @@ -1077,18 +988,16 @@ trait TypedTreeInfo extends TreeInfo[Type] { self: Trees.Instance[Type] => tree.tpe.widenTermRefExpr.dealias.normalized match case ConstantType(Constant(x)) => Some(x) case _ => None - } } -object TreeInfo { +object TreeInfo: /** A purity level is represented as a bitset (expressed as an Int) */ - class PurityLevel(val x: Int) extends AnyVal { + class PurityLevel(val x: Int) extends AnyVal: /** `this` contains the bits of `that` */ def >= (that: PurityLevel): Boolean = (x & that.x) == that.x /** The intersection of the bits of `this` and `that` */ def min(that: PurityLevel): PurityLevel = new PurityLevel(x & that.x) - } /** An expression is a stable path. Requires that expression is at least idempotent */ val Path: PurityLevel = new PurityLevel(4) @@ -1106,4 +1015,3 @@ object TreeInfo { /** A stable path that is also idempotent */ val IdempotentPath: PurityLevel = new PurityLevel(Idempotent.x | Path.x) -} diff --git a/compiler/src/dotty/tools/dotc/ast/TreeMapWithImplicits.scala b/compiler/src/dotty/tools/dotc/ast/TreeMapWithImplicits.scala index ae674c25dc3d..7f7fcad82d6c 100644 --- a/compiler/src/dotty/tools/dotc/ast/TreeMapWithImplicits.scala +++ b/compiler/src/dotty/tools/dotc/ast/TreeMapWithImplicits.scala @@ -13,42 +13,37 @@ import core.TypeError * * This incudes implicits defined in scope as well as imported implicits. 
*/ -class TreeMapWithImplicits extends tpd.TreeMapWithPreciseStatContexts { +class TreeMapWithImplicits extends tpd.TreeMapWithPreciseStatContexts: import tpd._ def transformSelf(vd: ValDef)(using Context): ValDef = cpy.ValDef(vd)(tpt = transform(vd.tpt)) - private def nestedScopeCtx(defs: List[Tree])(using Context): Context = { + private def nestedScopeCtx(defs: List[Tree])(using Context): Context = val nestedCtx = ctx.fresh.setNewScope - defs foreach { + defs foreach: case d: DefTree if d.symbol.isOneOf(GivenOrImplicitVal) => nestedCtx.enter(d.symbol) case _ => - } nestedCtx - } - private def patternScopeCtx(pattern: Tree)(using Context): Context = { + private def patternScopeCtx(pattern: Tree)(using Context): Context = val nestedCtx = ctx.fresh.setNewScope - pattern.foreachSubTree { + pattern.foreachSubTree: case d: DefTree if d.symbol.isOneOf(GivenOrImplicitVal) => nestedCtx.enter(d.symbol) case _ => - } nestedCtx - } - override def transform(tree: Tree)(using Context): Tree = { - try tree match { + override def transform(tree: Tree)(using Context): Tree = + try tree match case Block(stats, expr) => super.transform(tree)(using nestedScopeCtx(stats)) case tree: DefDef => - inContext(localCtx(tree)) { + inContext(localCtx(tree)): cpy.DefDef(tree)( tree.name, transformParamss(tree.paramss), transform(tree.tpt), transform(tree.rhs)(using nestedScopeCtx(tree.paramss.flatten))) - } case impl @ Template(constr, _, self, _) => cpy.Template(tree)( transformSub(constr), @@ -65,12 +60,8 @@ class TreeMapWithImplicits extends tpd.TreeMapWithPreciseStatContexts { ) case _ => super.transform(tree) - } - catch { + catch case ex: TypeError => report.error(ex, tree.srcPos) tree - } - } -} diff --git a/compiler/src/dotty/tools/dotc/ast/TreeTypeMap.scala b/compiler/src/dotty/tools/dotc/ast/TreeTypeMap.scala index 955892b2ae22..35d5b7ba1d65 100644 --- a/compiler/src/dotty/tools/dotc/ast/TreeTypeMap.scala +++ b/compiler/src/dotty/tools/dotc/ast/TreeTypeMap.scala @@ -38,7 +38,7 @@ 
class TreeTypeMap( val newOwners: List[Symbol] = Nil, val substFrom: List[Symbol] = Nil, val substTo: List[Symbol] = Nil, - cpy: tpd.TreeCopier = tpd.cpy)(using Context) extends tpd.TreeMap(cpy) { + cpy: tpd.TreeCopier = tpd.cpy)(using Context) extends tpd.TreeMap(cpy): import tpd._ def copy( @@ -56,34 +56,29 @@ class TreeTypeMap( /** Replace occurrences of `This(oldOwner)` in some prefix of a type * by the corresponding `This(newOwner)`. */ - private val mapOwnerThis = new TypeMap with cc.CaptureSet.IdempotentCaptRefMap { - private def mapPrefix(from: List[Symbol], to: List[Symbol], tp: Type): Type = from match { + private val mapOwnerThis = new TypeMap with cc.CaptureSet.IdempotentCaptRefMap: + private def mapPrefix(from: List[Symbol], to: List[Symbol], tp: Type): Type = from match case Nil => tp case (cls: ClassSymbol) :: from1 => mapPrefix(from1, to.tail, tp.substThis(cls, to.head.thisType)) case _ :: from1 => mapPrefix(from1, to.tail, tp) - } - def apply(tp: Type): Type = tp match { + def apply(tp: Type): Type = tp match case tp: NamedType => tp.derivedSelect(mapPrefix(oldOwners, newOwners, tp.prefix)) case _ => mapOver(tp) - } - } def mapType(tp: Type): Type = mapOwnerThis(typeMap(tp).substSym(substFrom, substTo)) private def updateDecls(prevStats: List[Tree], newStats: List[Tree]): Unit = if (prevStats.isEmpty) assert(newStats.isEmpty) - else { - prevStats.head match { + else + prevStats.head match case pdef: MemberDef => val prevSym = pdef.symbol val newSym = newStats.head.symbol val newCls = newSym.owner.asClass if (prevSym != newSym) newCls.replace(prevSym, newSym) case _ => - } updateDecls(prevStats.tail, newStats.tail) - } def transformInlined(tree: Inlined)(using Context): Tree = val Inlined(call, bindings, expanded) = tree @@ -91,7 +86,7 @@ class TreeTypeMap( val expanded1 = tmap1.transform(expanded) cpy.Inlined(tree)(call, bindings1, expanded1) - override def transform(tree: Tree)(using Context): Tree = treeMap(tree) match { + override def 
transform(tree: Tree)(using Context): Tree = treeMap(tree) match case impl @ Template(constr, _, self, _) => val tmap = withMappedSyms(localSyms(impl :: self :: Nil)) cpy.Template(impl)( @@ -102,7 +97,7 @@ class TreeTypeMap( (tmap.transform(_)(using ctx.withOwner(mapOwner(impl.symbol.owner)))) ).withType(tmap.mapType(impl.tpe)) case tree1 => - tree1.withType(mapType(tree1.tpe)) match { + tree1.withType(mapType(tree1.tpe)) match case id: Ident => if needsSelect(id.tpe) then ref(id.tpe.asInstanceOf[TermRef]).withSpan(id.span) @@ -125,10 +120,9 @@ class TreeTypeMap( val (tmap1, paramss1) = transformAllParamss(paramss) val res = cpy.DefDef(ddef)(name, paramss1, tmap1.transform(tpt), tmap1.transform(ddef.rhs)) res.symbol.setParamssFromDefs(paramss1) - res.symbol.transformAnnotations { + res.symbol.transformAnnotations: case ann: BodyAnnotation => ann.derivedAnnotation(transform(ann.tree)) case ann => ann - } res case tdef @ LambdaTypeTree(tparams, body) => val (tmap1, tparams1) = transformDefs(tparams) @@ -148,16 +142,13 @@ class TreeTypeMap( cpy.Labeled(labeled)(bind1, expr1) case tree1 => super.transform(tree1) - } - } override def transformStats(trees: List[Tree], exprOwner: Symbol)(using Context): List[Tree] = transformDefs(trees)._2 - def transformDefs[TT <: Tree](trees: List[TT])(using Context): (TreeTypeMap, List[TT]) = { + def transformDefs[TT <: Tree](trees: List[TT])(using Context): (TreeTypeMap, List[TT]) = val tmap = withMappedSyms(localSyms(trees)) (tmap, tmap.transformSub(trees)) - } private def transformAllParamss(paramss: List[ParamClause]): (TreeTypeMap, List[ParamClause]) = paramss match case params :: paramss1 => @@ -177,7 +168,7 @@ class TreeTypeMap( /** The current tree map composed with a substitution [from -> to] */ def withSubstitution(from: List[Symbol], to: List[Symbol]): TreeTypeMap = if (from eq to) this - else { + else // assert that substitution stays idempotent, assuming its parts are // TODO: It might be better to cater for the 
asserted-away conditions, by // setting up a proper substitution abstraction with a compose operator that @@ -194,7 +185,6 @@ class TreeTypeMap( to ++ newOwners, from ++ substFrom, to ++ substTo) - } /** Apply `typeMap` and `ownerMap` to given symbols `syms` * and return a treemap that contains the substitution @@ -233,4 +223,3 @@ class TreeTypeMap( |newOwners = ${showSyms(newOwners)} |substFrom = ${showSyms(substFrom)} |substTo = ${showSyms(substTo)}""".stripMargin -} diff --git a/compiler/src/dotty/tools/dotc/ast/Trees.scala b/compiler/src/dotty/tools/dotc/ast/Trees.scala index 54c15b9909fa..8b99a3cd9af6 100644 --- a/compiler/src/dotty/tools/dotc/ast/Trees.scala +++ b/compiler/src/dotty/tools/dotc/ast/Trees.scala @@ -19,7 +19,7 @@ import compiletime.uninitialized import Decorators._ import staging.StagingLevel.* -object Trees { +object Trees: type Untyped = Type | Null @@ -48,7 +48,7 @@ object Trees { * nodes. */ abstract class Tree[+T <: Untyped](implicit @constructorOnly src: SourceFile) - extends Positioned, SrcPos, Product, Attachment.Container, printing.Showable { + extends Positioned, SrcPos, Product, Attachment.Container, printing.Showable: if (Stats.enabled) ntrees += 1 @@ -86,7 +86,7 @@ object Trees { /** Return a typed tree that's isomorphic to this tree, but has given * type. 
(Overridden by empty trees) */ - def withType(tpe: Type)(using Context): ThisTree[Type] = { + def withType(tpe: Type)(using Context): ThisTree[Type] = if (tpe.isInstanceOf[ErrorType]) assert(!Config.checkUnreportedErrors || ctx.reporter.errorsReported || @@ -96,7 +96,6 @@ object Trees { else if (Config.checkTreesConsistent) checkChildrenTyped(productIterator) withTypeUnchecked(tpe) - } /** Check that typed trees don't refer to untyped ones, except if * - the parent tree is an import, or @@ -106,23 +105,21 @@ object Trees { private def checkChildrenTyped(it: Iterator[Any])(using Context): Unit = if (!this.isInstanceOf[Import[?]]) while (it.hasNext) - it.next() match { + it.next() match case x: Ident[?] => // untyped idents are used in a number of places in typed trees case x: Tree[?] => assert(x.hasType || ctx.reporter.errorsReported, s"$this has untyped child $x") case xs: List[?] => checkChildrenTyped(xs.iterator) case _ => - } - def withTypeUnchecked(tpe: Type): ThisTree[Type] = { + def withTypeUnchecked(tpe: Type): ThisTree[Type] = val tree = (if (myTpe == null || (myTpe.asInstanceOf[AnyRef] eq tpe.asInstanceOf[AnyRef])) this - else cloneIn(source)).asInstanceOf[Tree[Type]] + else cloneIn(source)).asInstanceOf[Tree[Type]] tree overwriteType tpe tree.asInstanceOf[ThisTree[Type]] - } /** Does the tree have its type field set? Note: this operation is not * referentially transparent, because it can observe the withType @@ -169,16 +166,14 @@ object Trees { if (this eq genericEmptyTree) that else this /** The number of nodes in this tree */ - def treeSize: Int = { + def treeSize: Int = var s = 1 - def addSize(elem: Any): Unit = elem match { + def addSize(elem: Any): Unit = elem match case t: Tree[?] => s += t.treeSize case ts: List[?] => ts foreach addSize case _ => - } productIterator foreach addSize s - } /** If this is a thicket, perform `op` on each of its trees * otherwise, perform `op` ion tree itself. 
@@ -187,38 +182,30 @@ object Trees { override def toText(printer: Printer): Text = printer.toText(this) - def sameTree(that: Tree[?]): Boolean = { + def sameTree(that: Tree[?]): Boolean = def isSame(x: Any, y: Any): Boolean = - x.asInstanceOf[AnyRef].eq(y.asInstanceOf[AnyRef]) || { - x match { + x.asInstanceOf[AnyRef].eq(y.asInstanceOf[AnyRef]) `||`: + x match case x: Tree[?] => - y match { + y match case y: Tree[?] => x.sameTree(y) case _ => false - } case x: List[?] => - y match { + y match case y: List[?] => x.corresponds(y)(isSame) case _ => false - } case _ => false - } - } - this.getClass == that.getClass && { + this.getClass == that.getClass `&&`: val it1 = this.productIterator val it2 = that.productIterator it1.corresponds(it2)(isSame) - } - } override def hashCode(): Int = System.identityHashCode(this) override def equals(that: Any): Boolean = this eq that.asInstanceOf[AnyRef] - } - class UnAssignedTypeException[T <: Untyped](tree: Tree[T]) extends RuntimeException { + class UnAssignedTypeException[T <: Untyped](tree: Tree[T]) extends RuntimeException: override def getMessage: String = s"type of $tree is not assigned" - } type LazyTree[+T <: Untyped] = Tree[T] | Lazy[Tree[T]] type LazyTreeList[+T <: Untyped] = List[Tree[T]] | Lazy[List[Tree[T]]] @@ -228,63 +215,56 @@ object Trees { /** Instances of this class are trees for which isType is definitely true. * Note that some trees have isType = true without being TypTrees (e.g. Ident, Annotated) */ - trait TypTree[+T <: Untyped] extends Tree[T] { + trait TypTree[+T <: Untyped] extends Tree[T]: type ThisTree[+T <: Untyped] <: TypTree[T] override def isType: Boolean = true - } /** Instances of this class are trees for which isTerm is definitely true. * Note that some trees have isTerm = true without being TermTrees (e.g. 
Ident, Annotated) */ - trait TermTree[+T <: Untyped] extends Tree[T] { + trait TermTree[+T <: Untyped] extends Tree[T]: type ThisTree[+T <: Untyped] <: TermTree[T] override def isTerm: Boolean = true - } /** Instances of this class are trees which are not terms but are legal * parts of patterns. */ - trait PatternTree[+T <: Untyped] extends Tree[T] { + trait PatternTree[+T <: Untyped] extends Tree[T]: type ThisTree[+T <: Untyped] <: PatternTree[T] override def isPattern: Boolean = true - } /** Tree's denotation can be derived from its type */ - abstract class DenotingTree[+T <: Untyped](implicit @constructorOnly src: SourceFile) extends Tree[T] { + abstract class DenotingTree[+T <: Untyped](implicit @constructorOnly src: SourceFile) extends Tree[T]: type ThisTree[+T <: Untyped] <: DenotingTree[T] override def denot(using Context): Denotation = typeOpt.stripped match case tpe: NamedType => tpe.denot case tpe: ThisType => tpe.cls.denot case _ => NoDenotation - } /** Tree's denot/isType/isTerm properties come from a subtree * identified by `forwardTo`. 
*/ - abstract class ProxyTree[+T <: Untyped](implicit @constructorOnly src: SourceFile) extends Tree[T] { + abstract class ProxyTree[+T <: Untyped](implicit @constructorOnly src: SourceFile) extends Tree[T]: type ThisTree[+T <: Untyped] <: ProxyTree[T] def forwardTo: Tree[T] override def denot(using Context): Denotation = forwardTo.denot override def isTerm: Boolean = forwardTo.isTerm override def isType: Boolean = forwardTo.isType - } /** Tree has a name */ - abstract class NameTree[+T <: Untyped](implicit @constructorOnly src: SourceFile) extends DenotingTree[T] { + abstract class NameTree[+T <: Untyped](implicit @constructorOnly src: SourceFile) extends DenotingTree[T]: type ThisTree[+T <: Untyped] <: NameTree[T] def name: Name - } /** Tree refers by name to a denotation */ - abstract class RefTree[+T <: Untyped](implicit @constructorOnly src: SourceFile) extends NameTree[T] { + abstract class RefTree[+T <: Untyped](implicit @constructorOnly src: SourceFile) extends NameTree[T]: type ThisTree[+T <: Untyped] <: RefTree[T] def qualifier: Tree[T] override def isType: Boolean = name.isTypeName override def isTerm: Boolean = name.isTermName - } /** Tree defines a new symbol */ - trait DefTree[+T <: Untyped] extends DenotingTree[T] { + trait DefTree[+T <: Untyped] extends DenotingTree[T]: type ThisTree[+T <: Untyped] <: DefTree[T] private var myMods: untpd.Modifiers | Null = uninitialized @@ -294,11 +274,10 @@ object Trees { def withAnnotations(annots: List[untpd.Tree]): ThisTree[Untyped] = withMods(rawMods.withAnnotations(annots)) - def withMods(mods: untpd.Modifiers): ThisTree[Untyped] = { + def withMods(mods: untpd.Modifiers): ThisTree[Untyped] = val tree = if (myMods == null || (myMods == mods)) this else cloneIn(source) tree.setMods(mods) tree.asInstanceOf[ThisTree[Untyped]] - } def withFlags(flags: FlagSet): ThisTree[Untyped] = withMods(untpd.Modifiers(flags)) def withAddedFlags(flags: FlagSet): ThisTree[Untyped] = withMods(rawMods | flags) @@ -308,7 +287,6 @@ 
object Trees { override def isDef: Boolean = true def namedType: NamedType = tpe.asInstanceOf[NamedType] - } extension (mdef: untpd.DefTree) def mods: untpd.Modifiers = mdef.rawMods @@ -356,7 +334,7 @@ object Trees { end WithEndMarker abstract class NamedDefTree[+T <: Untyped](implicit @constructorOnly src: SourceFile) - extends NameTree[T] with DefTree[T] with WithEndMarker[T] { + extends NameTree[T] with DefTree[T] with WithEndMarker[T]: type ThisTree[+T <: Untyped] <: NamedDefTree[T] protected def srcName(using Context): Name = @@ -372,14 +350,12 @@ object Trees { * Overridden in Bind */ def nameSpan(using Context): Span = - if (span.exists) { + if (span.exists) val point = span.point if (rawMods.is(Synthetic) || span.isSynthetic || name.toTermName == nme.ERROR) Span(point) - else { + else val realName = srcName.stripModuleClassSuffix.lastPart Span(point, point + realName.length, point) - } - } else span /** The source position of the name defined by this definition. @@ -387,34 +363,30 @@ object Trees { * if the definition comes from source. */ def namePos(using Context): SourcePosition = source.atSpan(nameSpan) - } /** Tree defines a new symbol and carries modifiers. * The position of a MemberDef contains only the defined identifier or pattern. * The envelope of a MemberDef contains the whole definition and has its point * on the opening keyword (or the next token after that if keyword is missing). 
*/ - abstract class MemberDef[+T <: Untyped](implicit @constructorOnly src: SourceFile) extends NamedDefTree[T] { + abstract class MemberDef[+T <: Untyped](implicit @constructorOnly src: SourceFile) extends NamedDefTree[T]: type ThisTree[+T <: Untyped] <: MemberDef[T] def rawComment: Option[Comment] = getAttachment(DocComment) - def setComment(comment: Option[Comment]): this.type = { + def setComment(comment: Option[Comment]): this.type = comment.map(putAttachment(DocComment, _)) this - } def name: Name - } /** A ValDef or DefDef tree */ - abstract class ValOrDefDef[+T <: Untyped](implicit @constructorOnly src: SourceFile) extends MemberDef[T], WithLazyFields { + abstract class ValOrDefDef[+T <: Untyped](implicit @constructorOnly src: SourceFile) extends MemberDef[T], WithLazyFields: type ThisTree[+T <: Untyped] <: ValOrDefDef[T] def name: TermName def tpt: Tree[T] def unforcedRhs: LazyTree[T] def rhs(using Context): Tree[T] - } trait ValOrTypeDef[+T <: Untyped] extends MemberDef[T]: type ThisTree[+T <: Untyped] <: ValOrTypeDef[T] @@ -425,22 +397,20 @@ object Trees { /** name */ case class Ident[+T <: Untyped] private[ast] (name: Name)(implicit @constructorOnly src: SourceFile) - extends RefTree[T] { + extends RefTree[T]: type ThisTree[+T <: Untyped] = Ident[T] def qualifier: Tree[T] = genericEmptyTree def isBackquoted: Boolean = hasAttachment(Backquoted) - } class SearchFailureIdent[+T <: Untyped] private[ast] (name: Name, expl: => String)(implicit @constructorOnly src: SourceFile) - extends Ident[T](name) { + extends Ident[T](name): def explanation = expl override def toString: String = s"SearchFailureIdent($explanation)" - } /** qualifier.name, or qualifier#name, if qualifier is a type */ case class Select[+T <: Untyped] private[ast] (qualifier: Tree[T], name: Name)(implicit @constructorOnly src: SourceFile) - extends RefTree[T] { + extends RefTree[T]: type ThisTree[+T <: Untyped] = Select[T] override def denot(using Context): Denotation = typeOpt match @@ 
-461,40 +431,34 @@ object Trees { else Span(point, span.end, point) else span - } class SelectWithSig[+T <: Untyped] private[ast] (qualifier: Tree[T], name: Name, val sig: Signature)(implicit @constructorOnly src: SourceFile) - extends Select[T](qualifier, name) { + extends Select[T](qualifier, name): override def toString: String = s"SelectWithSig($qualifier, $name, $sig)" - } /** qual.this */ case class This[+T <: Untyped] private[ast] (qual: untpd.Ident)(implicit @constructorOnly src: SourceFile) - extends DenotingTree[T] with TermTree[T] { + extends DenotingTree[T] with TermTree[T]: type ThisTree[+T <: Untyped] = This[T] // Denotation of a This tree is always the underlying class; needs correction for modules. override def denot(using Context): Denotation = - typeOpt match { + typeOpt match case tpe @ TermRef(pre, _) if tpe.symbol.is(Module) => tpe.symbol.moduleClass.denot.asSeenFrom(pre) case _ => super.denot - } - } /** C.super[mix], where qual = C.this */ case class Super[+T <: Untyped] private[ast] (qual: Tree[T], mix: untpd.Ident)(implicit @constructorOnly src: SourceFile) - extends ProxyTree[T] with TermTree[T] { + extends ProxyTree[T] with TermTree[T]: type ThisTree[+T <: Untyped] = Super[T] def forwardTo: Tree[T] = qual - } - abstract class GenericApply[+T <: Untyped](implicit @constructorOnly src: SourceFile) extends ProxyTree[T] with TermTree[T] { + abstract class GenericApply[+T <: Untyped](implicit @constructorOnly src: SourceFile) extends ProxyTree[T] with TermTree[T]: type ThisTree[+T <: Untyped] <: GenericApply[T] val fun: Tree[T] val args: List[Tree[T]] def forwardTo: Tree[T] = fun - } object GenericApply: def unapply[T <: Untyped](tree: Tree[T]): Option[(Tree[T], List[Tree[T]])] = tree match @@ -509,7 +473,7 @@ object Trees { /** fun(args) */ case class Apply[+T <: Untyped] private[ast] (fun: Tree[T], args: List[Tree[T]])(implicit @constructorOnly src: SourceFile) - extends GenericApply[T] { + extends GenericApply[T]: type ThisTree[+T <: 
Untyped] = Apply[T] def setApplyKind(kind: ApplyKind) = @@ -521,64 +485,54 @@ object Trees { */ def applyKind: ApplyKind = attachmentOrElse(untpd.KindOfApply, ApplyKind.Regular) - } /** fun[args] */ case class TypeApply[+T <: Untyped] private[ast] (fun: Tree[T], args: List[Tree[T]])(implicit @constructorOnly src: SourceFile) - extends GenericApply[T] { + extends GenericApply[T]: type ThisTree[+T <: Untyped] = TypeApply[T] - } /** const */ case class Literal[+T <: Untyped] private[ast] (const: Constant)(implicit @constructorOnly src: SourceFile) - extends Tree[T] with TermTree[T] { + extends Tree[T] with TermTree[T]: type ThisTree[+T <: Untyped] = Literal[T] - } /** new tpt, but no constructor call */ case class New[+T <: Untyped] private[ast] (tpt: Tree[T])(implicit @constructorOnly src: SourceFile) - extends Tree[T] with TermTree[T] { + extends Tree[T] with TermTree[T]: type ThisTree[+T <: Untyped] = New[T] - } /** expr : tpt */ case class Typed[+T <: Untyped] private[ast] (expr: Tree[T], tpt: Tree[T])(implicit @constructorOnly src: SourceFile) - extends ProxyTree[T] with TermTree[T] { + extends ProxyTree[T] with TermTree[T]: type ThisTree[+T <: Untyped] = Typed[T] def forwardTo: Tree[T] = expr - } /** name = arg, in a parameter list */ case class NamedArg[+T <: Untyped] private[ast] (name: Name, arg: Tree[T])(implicit @constructorOnly src: SourceFile) - extends Tree[T] { + extends Tree[T]: type ThisTree[+T <: Untyped] = NamedArg[T] - } /** name = arg, outside a parameter list */ case class Assign[+T <: Untyped] private[ast] (lhs: Tree[T], rhs: Tree[T])(implicit @constructorOnly src: SourceFile) - extends TermTree[T] { + extends TermTree[T]: type ThisTree[+T <: Untyped] = Assign[T] - } /** { stats; expr } */ case class Block[+T <: Untyped] private[ast] (stats: List[Tree[T]], expr: Tree[T])(implicit @constructorOnly src: SourceFile) - extends Tree[T] { + extends Tree[T]: type ThisTree[+T <: Untyped] = Block[T] override def isType: Boolean = expr.isType override def 
isTerm: Boolean = !isType // this will classify empty trees as terms, which is necessary - } /** if cond then thenp else elsep */ case class If[+T <: Untyped] private[ast] (cond: Tree[T], thenp: Tree[T], elsep: Tree[T])(implicit @constructorOnly src: SourceFile) - extends TermTree[T] { + extends TermTree[T]: type ThisTree[+T <: Untyped] = If[T] def isInline = false - } class InlineIf[+T <: Untyped] private[ast] (cond: Tree[T], thenp: Tree[T], elsep: Tree[T])(implicit @constructorOnly src: SourceFile) - extends If(cond, thenp, elsep) { + extends If(cond, thenp, elsep): override def isInline = true override def toString = s"InlineIf($cond, $thenp, $elsep)" - } /** A closure with an environment and a reference to a method. * @param env The captured parameters of the closure @@ -590,34 +544,29 @@ object Trees { * given in `tpt`, which must be a SAM type. */ case class Closure[+T <: Untyped] private[ast] (env: List[Tree[T]], meth: Tree[T], tpt: Tree[T])(implicit @constructorOnly src: SourceFile) - extends TermTree[T] { + extends TermTree[T]: type ThisTree[+T <: Untyped] = Closure[T] - } /** selector match { cases } */ case class Match[+T <: Untyped] private[ast] (selector: Tree[T], cases: List[CaseDef[T]])(implicit @constructorOnly src: SourceFile) - extends TermTree[T] { + extends TermTree[T]: type ThisTree[+T <: Untyped] = Match[T] def isInline = false - } class InlineMatch[+T <: Untyped] private[ast] (selector: Tree[T], cases: List[CaseDef[T]])(implicit @constructorOnly src: SourceFile) - extends Match(selector, cases) { + extends Match(selector, cases): override def isInline = true override def toString = s"InlineMatch($selector, $cases)" - } /** case pat if guard => body */ case class CaseDef[+T <: Untyped] private[ast] (pat: Tree[T], guard: Tree[T], body: Tree[T])(implicit @constructorOnly src: SourceFile) - extends Tree[T] { + extends Tree[T]: type ThisTree[+T <: Untyped] = CaseDef[T] - } /** label[tpt]: { expr } */ case class Labeled[+T <: Untyped] private[ast] 
(bind: Bind[T], expr: Tree[T])(implicit @constructorOnly src: SourceFile) - extends NameTree[T] { + extends NameTree[T]: type ThisTree[+T <: Untyped] = Labeled[T] def name: Name = bind.name - } /** return expr * where `from` refers to the method or label from which the return takes place @@ -625,35 +574,30 @@ object Trees { * closures can intervene. */ case class Return[+T <: Untyped] private[ast] (expr: Tree[T], from: Tree[T] = genericEmptyTree)(implicit @constructorOnly src: SourceFile) - extends TermTree[T] { + extends TermTree[T]: type ThisTree[+T <: Untyped] = Return[T] - } /** while (cond) { body } */ case class WhileDo[+T <: Untyped] private[ast] (cond: Tree[T], body: Tree[T])(implicit @constructorOnly src: SourceFile) - extends TermTree[T] { + extends TermTree[T]: type ThisTree[+T <: Untyped] = WhileDo[T] - } /** try block catch cases finally finalizer */ case class Try[+T <: Untyped] private[ast] (expr: Tree[T], cases: List[CaseDef[T]], finalizer: Tree[T])(implicit @constructorOnly src: SourceFile) - extends TermTree[T] { + extends TermTree[T]: type ThisTree[+T <: Untyped] = Try[T] - } /** Seq(elems) * @param tpt The element type of the sequence. */ case class SeqLiteral[+T <: Untyped] private[ast] (elems: List[Tree[T]], elemtpt: Tree[T])(implicit @constructorOnly src: SourceFile) - extends Tree[T] { + extends Tree[T]: type ThisTree[+T <: Untyped] = SeqLiteral[T] - } /** Array(elems) */ class JavaSeqLiteral[+T <: Untyped] private[ast] (elems: List[Tree[T]], elemtpt: Tree[T])(implicit @constructorOnly src: SourceFile) - extends SeqLiteral(elems, elemtpt) { + extends SeqLiteral(elems, elemtpt): override def toString: String = s"JavaSeqLiteral($elems, $elemtpt)" - } /** A tree representing inlined code. * @@ -672,11 +616,10 @@ object Trees { * call, whereas `expansion` represents the body of the inlined function. 
*/ case class Inlined[+T <: Untyped] private[ast] (call: tpd.Tree, bindings: List[MemberDef[T]], expansion: Tree[T])(implicit @constructorOnly src: SourceFile) - extends Tree[T] { + extends Tree[T]: type ThisTree[+T <: Untyped] = Inlined[T] override def isTerm = expansion.isTerm override def isType = expansion.isType - } /** A tree representing a quote `'{ body }` or `'[ body ]`. * `Quote`s are created by the `Parser`. In typer they can be typed as a @@ -698,7 +641,7 @@ object Trees { * @param tags Term references to instances of `Type[T]` for `T`s that are used in the quote */ case class Quote[+T <: Untyped] private[ast] (body: Tree[T], tags: List[Tree[T]])(implicit @constructorOnly src: SourceFile) - extends TermTree[T] { + extends TermTree[T]: type ThisTree[+T <: Untyped] = Quote[T] /** Is this a type quote `'[tpe]' */ @@ -719,7 +662,6 @@ object Trees { defn.FunctionType(1, isContextual = true) .appliedTo(defn.QuotesClass.typeRef, exprType) withType(quoteType) - } /** A tree representing a splice `${ expr }` * @@ -733,9 +675,8 @@ object Trees { * @param expr The tree that was spliced */ case class Splice[+T <: Untyped] private[ast] (expr: Tree[T])(implicit @constructorOnly src: SourceFile) - extends TermTree[T] { + extends TermTree[T]: type ThisTree[+T <: Untyped] = Splice[T] - } /** A tree representing a pattern splice `${ pattern }`, `$ident` or `$ident(args*)` in a quote pattern. 
* @@ -749,18 +690,16 @@ object Trees { * @param args The arguments of the splice (the HOAS arguments) */ case class SplicePattern[+T <: Untyped] private[ast] (body: Tree[T], args: List[Tree[T]])(implicit @constructorOnly src: SourceFile) - extends TermTree[T] { + extends TermTree[T]: type ThisTree[+T <: Untyped] = SplicePattern[T] - } /** A type tree that represents an existing or inferred type */ case class TypeTree[+T <: Untyped]()(implicit @constructorOnly src: SourceFile) - extends DenotingTree[T] with TypTree[T] { + extends DenotingTree[T] with TypTree[T]: type ThisTree[+T <: Untyped] = TypeTree[T] override def isEmpty: Boolean = !hasType override def toString: String = s"TypeTree${if (hasType) s"[$typeOpt]" else ""}" - } /** Tree that replaces a level 1 splices in pickled (level 0) quotes. * It is only used when picking quotes (will never be in a TASTy file). @@ -770,10 +709,9 @@ object Trees { * @param args The arguments of the splice to compute its content * @param content Lambda that computes the content of the hole. This tree is empty when in a quote pickle. */ - case class Hole[+T <: Untyped](override val isTerm: Boolean, idx: Int, args: List[Tree[T]], content: Tree[T])(implicit @constructorOnly src: SourceFile) extends Tree[T] { + case class Hole[+T <: Untyped](override val isTerm: Boolean, idx: Int, args: List[Tree[T]], content: Tree[T])(implicit @constructorOnly src: SourceFile) extends Tree[T]: type ThisTree[+T <: Untyped] <: Hole[T] override def isType: Boolean = !isTerm - } /** A type tree whose type is inferred. These trees appear in two contexts * - as an argument of a TypeApply. 
In that case its type is always a TypeVar @@ -784,23 +722,20 @@ object Trees { /** ref.type */ case class SingletonTypeTree[+T <: Untyped] private[ast] (ref: Tree[T])(implicit @constructorOnly src: SourceFile) - extends DenotingTree[T] with TypTree[T] { + extends DenotingTree[T] with TypTree[T]: type ThisTree[+T <: Untyped] = SingletonTypeTree[T] - } /** tpt { refinements } */ case class RefinedTypeTree[+T <: Untyped] private[ast] (tpt: Tree[T], refinements: List[Tree[T]])(implicit @constructorOnly src: SourceFile) - extends ProxyTree[T] with TypTree[T] { + extends ProxyTree[T] with TypTree[T]: type ThisTree[+T <: Untyped] = RefinedTypeTree[T] def forwardTo: Tree[T] = tpt - } /** tpt[args] */ case class AppliedTypeTree[+T <: Untyped] private[ast] (tpt: Tree[T], args: List[Tree[T]])(implicit @constructorOnly src: SourceFile) - extends ProxyTree[T] with TypTree[T] { + extends ProxyTree[T] with TypTree[T]: type ThisTree[+T <: Untyped] = AppliedTypeTree[T] def forwardTo: Tree[T] = tpt - } /** [typeparams] -> tpt * @@ -826,51 +761,44 @@ object Trees { * example, to make "find all references" work in the IDE). 
*/ case class LambdaTypeTree[+T <: Untyped] private[ast] (tparams: List[TypeDef[T]], body: Tree[T])(implicit @constructorOnly src: SourceFile) - extends TypTree[T] { + extends TypTree[T]: type ThisTree[+T <: Untyped] = LambdaTypeTree[T] - } case class TermLambdaTypeTree[+T <: Untyped] private[ast] (params: List[ValDef[T]], body: Tree[T])(implicit @constructorOnly src: SourceFile) - extends TypTree[T] { + extends TypTree[T]: type ThisTree[+T <: Untyped] = TermLambdaTypeTree[T] - } /** [bound] selector match { cases } */ case class MatchTypeTree[+T <: Untyped] private[ast] (bound: Tree[T], selector: Tree[T], cases: List[CaseDef[T]])(implicit @constructorOnly src: SourceFile) - extends TypTree[T] { + extends TypTree[T]: type ThisTree[+T <: Untyped] = MatchTypeTree[T] - } /** => T */ case class ByNameTypeTree[+T <: Untyped] private[ast] (result: Tree[T])(implicit @constructorOnly src: SourceFile) - extends TypTree[T] { + extends TypTree[T]: type ThisTree[+T <: Untyped] = ByNameTypeTree[T] - } /** >: lo <: hi * >: lo <: hi = alias for RHS of bounded opaque type */ case class TypeBoundsTree[+T <: Untyped] private[ast] (lo: Tree[T], hi: Tree[T], alias: Tree[T])(implicit @constructorOnly src: SourceFile) - extends TypTree[T] { + extends TypTree[T]: type ThisTree[+T <: Untyped] = TypeBoundsTree[T] - } /** name @ body */ case class Bind[+T <: Untyped] private[ast] (name: Name, body: Tree[T])(implicit @constructorOnly src: SourceFile) - extends NamedDefTree[T] with PatternTree[T] { + extends NamedDefTree[T] with PatternTree[T]: type ThisTree[+T <: Untyped] = Bind[T] override def isType: Boolean = name.isTypeName override def isTerm: Boolean = name.isTermName override def nameSpan(using Context): Span = if span.exists then Span(span.start, span.start + name.toString.length) else span - } /** tree_1 | ... 
| tree_n */ case class Alternative[+T <: Untyped] private[ast] (trees: List[Tree[T]])(implicit @constructorOnly src: SourceFile) - extends PatternTree[T] { + extends PatternTree[T]: type ThisTree[+T <: Untyped] = Alternative[T] - } /** The typed translation of `extractor(patterns)` in a pattern. The translation has the following * components: @@ -887,26 +815,24 @@ object Trees { * if (result.isDefined) "match patterns against result" */ case class UnApply[+T <: Untyped] private[ast] (fun: Tree[T], implicits: List[Tree[T]], patterns: List[Tree[T]])(implicit @constructorOnly src: SourceFile) - extends ProxyTree[T] with PatternTree[T] { + extends ProxyTree[T] with PatternTree[T]: type ThisTree[+T <: Untyped] = UnApply[T] def forwardTo = fun - } /** mods val name: tpt = rhs */ case class ValDef[+T <: Untyped] private[ast] (name: TermName, tpt: Tree[T], private var preRhs: LazyTree[T])(implicit @constructorOnly src: SourceFile) - extends ValOrDefDef[T], ValOrTypeDef[T] { + extends ValOrDefDef[T], ValOrTypeDef[T]: type ThisTree[+T <: Untyped] = ValDef[T] assert(isEmpty || (tpt ne genericEmptyTree)) def unforcedRhs: LazyTree[T] = preRhs def forceFields()(using Context): Unit = preRhs = force(preRhs) def rhs(using Context): Tree[T] = { forceFields(); preRhs.asInstanceOf[Tree[T]] } - } /** mods def name[tparams](vparams_1)...(vparams_n): tpt = rhs */ case class DefDef[+T <: Untyped] private[ast] (name: TermName, paramss: List[ParamClause[T]], tpt: Tree[T], private var preRhs: LazyTree[T])(implicit @constructorOnly src: SourceFile) - extends ValOrDefDef[T] { + extends ValOrDefDef[T]: type ThisTree[+T <: Untyped] = DefDef[T] assert(tpt ne genericEmptyTree) @@ -925,7 +851,6 @@ object Trees { def termParamss(using Context): List[List[ValDef[T]]] = (if ctx.erasedTypes then paramss else untpd.termParamssIn(paramss)) .asInstanceOf[List[List[ValDef[T]]]] - } /** mods class name template or * mods trait name template or @@ -934,14 +859,13 @@ object Trees { * mods type name >: lo <: 
hi = rhs if rhs = TypeBoundsTree(lo, hi, alias) and opaque in mods */ case class TypeDef[+T <: Untyped] private[ast] (name: TypeName, rhs: Tree[T])(implicit @constructorOnly src: SourceFile) - extends MemberDef[T], ValOrTypeDef[T] { + extends MemberDef[T], ValOrTypeDef[T]: type ThisTree[+T <: Untyped] = TypeDef[T] /** Is this a definition of a class? */ def isClassDef: Boolean = rhs.isInstanceOf[Template[?]] def isBackquoted: Boolean = hasAttachment(Backquoted) - } /** extends parents { self => body } * @param preParentsOrDerived A list of parents followed by a list of derived classes, @@ -949,7 +873,7 @@ object Trees { * Typed templates only have parents. */ case class Template[+T <: Untyped] private[ast] (constr: DefDef[T], private var preParentsOrDerived: LazyTreeList[T], self: ValDef[T], private var preBody: LazyTreeList[T])(implicit @constructorOnly src: SourceFile) - extends DefTree[T] with WithLazyFields { + extends DefTree[T] with WithLazyFields: type ThisTree[+T <: Untyped] = Template[T] def forceFields()(using Context): Unit = @@ -962,54 +886,47 @@ object Trees { def parents(using Context): List[Tree[T]] = parentsOrDerived // overridden by DerivingTemplate def derived: List[untpd.Tree] = Nil // overridden by DerivingTemplate - } abstract class ImportOrExport[+T <: Untyped](implicit @constructorOnly src: SourceFile) - extends DenotingTree[T] { + extends DenotingTree[T]: type ThisTree[+T <: Untyped] <: ImportOrExport[T] val expr: Tree[T] val selectors: List[untpd.ImportSelector] - } /** import expr.selectors * where a selector is either an untyped `Ident`, `name` or * an untyped thicket consisting of `name` and `rename`. 
*/ case class Import[+T <: Untyped] private[ast] (expr: Tree[T], selectors: List[untpd.ImportSelector])(implicit @constructorOnly src: SourceFile) - extends ImportOrExport[T] { + extends ImportOrExport[T]: type ThisTree[+T <: Untyped] = Import[T] - } /** export expr.selectors * where a selector is either an untyped `Ident`, `name` or * an untyped thicket consisting of `name` and `rename`. */ case class Export[+T <: Untyped] private[ast] (expr: Tree[T], selectors: List[untpd.ImportSelector])(implicit @constructorOnly src: SourceFile) - extends ImportOrExport[T] { + extends ImportOrExport[T]: type ThisTree[+T <: Untyped] = Export[T] - } /** package pid { stats } */ case class PackageDef[+T <: Untyped] private[ast] (pid: RefTree[T], stats: List[Tree[T]])(implicit @constructorOnly src: SourceFile) - extends ProxyTree[T] with WithEndMarker[T] { + extends ProxyTree[T] with WithEndMarker[T]: type ThisTree[+T <: Untyped] = PackageDef[T] def forwardTo: RefTree[T] = pid protected def srcName(using Context): Name = pid.name - } /** arg @annot */ case class Annotated[+T <: Untyped] private[ast] (arg: Tree[T], annot: Tree[T])(implicit @constructorOnly src: SourceFile) - extends ProxyTree[T] { + extends ProxyTree[T]: type ThisTree[+T <: Untyped] = Annotated[T] def forwardTo: Tree[T] = arg - } - trait WithoutTypeOrPos[+T <: Untyped] extends Tree[T] { + trait WithoutTypeOrPos[+T <: Untyped] extends Tree[T]: override def withTypeUnchecked(tpe: Type): ThisTree[Type] = this.asInstanceOf[ThisTree[Type]] override def span: Span = NoSpan override def span_=(span: Span): Unit = {} - } /** Temporary class that results from translation of ModuleDefs * (and possibly other statements). @@ -1017,17 +934,16 @@ object Trees { * a `transform(List[Tree])` call. 
*/ case class Thicket[+T <: Untyped](trees: List[Tree[T]])(implicit @constructorOnly src: SourceFile) - extends Tree[T] with WithoutTypeOrPos[T] { + extends Tree[T] with WithoutTypeOrPos[T]: myTpe = NoType.asInstanceOf[T] type ThisTree[+T <: Untyped] = Thicket[T] - def mapElems[U >: T <: Untyped](op: Tree[T] => Tree[U]): Thicket[U] = { + def mapElems[U >: T <: Untyped](op: Tree[T] => Tree[U]): Thicket[U] = val newTrees = trees.mapConserve(op) if (trees eq newTrees) this else Thicket[U](newTrees)(source).asInstanceOf[this.type] - } override def foreachInThicket(op: Tree[T] => Unit): Unit = trees foreach (_.foreachInThicket(op)) @@ -1043,20 +959,17 @@ object Trees { override def withSpan(span: Span): this.type = mapElems(_.withSpan(span)).asInstanceOf[this.type] - } - class EmptyTree[T <: Untyped] extends Thicket(Nil)(NoSource) { + class EmptyTree[T <: Untyped] extends Thicket(Nil)(NoSource): // assert(uniqueId != 1492) override def withSpan(span: Span) = throw AssertionError("Cannot change span of EmptyTree") - } class EmptyValDef[T <: Untyped] extends ValDef[T]( - nme.WILDCARD, genericEmptyTree[T], genericEmptyTree[T])(NoSource) with WithoutTypeOrPos[T] { + nme.WILDCARD, genericEmptyTree[T], genericEmptyTree[T])(NoSource) with WithoutTypeOrPos[T]: myTpe = NoType.asInstanceOf[T] setMods(untpd.Modifiers(PrivateLocal)) override def isEmpty: Boolean = true override def withSpan(span: Span) = throw AssertionError("Cannot change span of EmptyValDef") - } @sharable val theEmptyTree = new EmptyTree[Type]() @sharable val theEmptyValDef = new EmptyValDef[Type]() @@ -1064,29 +977,25 @@ object Trees { def genericEmptyValDef[T <: Untyped]: ValDef[T] = theEmptyValDef.asInstanceOf[ValDef[T]] def genericEmptyTree[T <: Untyped]: Thicket[T] = theEmptyTree.asInstanceOf[Thicket[T]] - def flatten[T <: Untyped](trees: List[Tree[T]]): List[Tree[T]] = { + def flatten[T <: Untyped](trees: List[Tree[T]]): List[Tree[T]] = def recur(buf: ListBuffer[Tree[T]] | Null, remaining: List[Tree[T]]): 
ListBuffer[Tree[T]] | Null = - remaining match { + remaining match case Thicket(elems) :: remaining1 => var buf1 = buf - if (buf1 == null) { + if (buf1 == null) buf1 = new ListBuffer[Tree[T]] var scanned = trees - while (scanned `ne` remaining) { + while (scanned `ne` remaining) buf1 += scanned.head scanned = scanned.tail - } - } recur(recur(buf1, elems), remaining1) case tree :: remaining1 => if (buf != null) buf += tree recur(buf, remaining1) case nil => buf - } val buf = recur(null, trees) if (buf != null) buf.toList else trees - } // ----- Lazy trees and tree sequences @@ -1201,10 +1110,9 @@ object Trees { def Thicket(x1: Tree, x2: Tree, x3: Tree)(implicit src: SourceFile): Thicket = new Thicket(x1 :: x2 :: x3 :: Nil) def Thicket(xs: List[Tree])(implicit src: SourceFile) = new Thicket(xs) - def flatTree(xs: List[Tree])(implicit src: SourceFile): Tree = flatten(xs) match { + def flatTree(xs: List[Tree])(implicit src: SourceFile): Tree = flatten(xs) match case x :: Nil => x case ys => Thicket(ys) - } // ----- Helper classes for copying, transforming, accumulating ----------------- @@ -1217,7 +1125,7 @@ object Trees { * These are exactly those methods that are overridden in TypedTreeCopier * so that they selectively retype themselves. Retyping needs a context. 
*/ - abstract class TreeCopier { + abstract class TreeCopier: protected def postProcess(tree: Tree, copied: untpd.Tree): copied.ThisTree[T] protected def postProcess(tree: Tree, copied: untpd.MemberDef): copied.ThisTree[T] @@ -1232,204 +1140,157 @@ object Trees { Stats.record(s"TreeCopier.finalize/${tree.getClass == copied.getClass}") postProcess(tree, copied.withSpan(tree.span).withAttachmentsFrom(tree)) - def Ident(tree: Tree)(name: Name)(using Context): Ident = tree match { + def Ident(tree: Tree)(name: Name)(using Context): Ident = tree match case tree: Ident if name == tree.name => tree case _ => finalize(tree, untpd.Ident(name)(sourceFile(tree))) - } - def Select(tree: Tree)(qualifier: Tree, name: Name)(using Context): Select = tree match { + def Select(tree: Tree)(qualifier: Tree, name: Name)(using Context): Select = tree match case tree: SelectWithSig => if ((qualifier eq tree.qualifier) && (name == tree.name)) tree else finalize(tree, SelectWithSig(qualifier, name, tree.sig)(sourceFile(tree))) case tree: Select if (qualifier eq tree.qualifier) && (name == tree.name) => tree case _ => finalize(tree, untpd.Select(qualifier, name)(sourceFile(tree))) - } /** Copy Ident or Select trees */ - def Ref(tree: RefTree)(name: Name)(using Context): RefTree = tree match { + def Ref(tree: RefTree)(name: Name)(using Context): RefTree = tree match case Ident(_) => Ident(tree)(name) case Select(qual, _) => Select(tree)(qual, name) - } - def This(tree: Tree)(qual: untpd.Ident)(using Context): This = tree match { + def This(tree: Tree)(qual: untpd.Ident)(using Context): This = tree match case tree: This if (qual eq tree.qual) => tree case _ => finalize(tree, untpd.This(qual)(sourceFile(tree))) - } - def Super(tree: Tree)(qual: Tree, mix: untpd.Ident)(using Context): Super = tree match { + def Super(tree: Tree)(qual: Tree, mix: untpd.Ident)(using Context): Super = tree match case tree: Super if (qual eq tree.qual) && (mix eq tree.mix) => tree case _ => finalize(tree, 
untpd.Super(qual, mix)(sourceFile(tree))) - } - def Apply(tree: Tree)(fun: Tree, args: List[Tree])(using Context): Apply = tree match { + def Apply(tree: Tree)(fun: Tree, args: List[Tree])(using Context): Apply = tree match case tree: Apply if (fun eq tree.fun) && (args eq tree.args) => tree case _ => finalize(tree, untpd.Apply(fun, args)(sourceFile(tree))) //.ensuring(res => res.uniqueId != 2213, s"source = $tree, ${tree.uniqueId}, ${tree.span}") - } - def TypeApply(tree: Tree)(fun: Tree, args: List[Tree])(using Context): TypeApply = tree match { + def TypeApply(tree: Tree)(fun: Tree, args: List[Tree])(using Context): TypeApply = tree match case tree: TypeApply if (fun eq tree.fun) && (args eq tree.args) => tree case _ => finalize(tree, untpd.TypeApply(fun, args)(sourceFile(tree))) - } - def Literal(tree: Tree)(const: Constant)(using Context): Literal = tree match { + def Literal(tree: Tree)(const: Constant)(using Context): Literal = tree match case tree: Literal if const == tree.const => tree case _ => finalize(tree, untpd.Literal(const)(sourceFile(tree))) - } - def New(tree: Tree)(tpt: Tree)(using Context): New = tree match { + def New(tree: Tree)(tpt: Tree)(using Context): New = tree match case tree: New if (tpt eq tree.tpt) => tree case _ => finalize(tree, untpd.New(tpt)(sourceFile(tree))) - } - def Typed(tree: Tree)(expr: Tree, tpt: Tree)(using Context): Typed = tree match { + def Typed(tree: Tree)(expr: Tree, tpt: Tree)(using Context): Typed = tree match case tree: Typed if (expr eq tree.expr) && (tpt eq tree.tpt) => tree case tree => finalize(tree, untpd.Typed(expr, tpt)(sourceFile(tree))) - } - def NamedArg(tree: Tree)(name: Name, arg: Tree)(using Context): NamedArg = tree match { + def NamedArg(tree: Tree)(name: Name, arg: Tree)(using Context): NamedArg = tree match case tree: NamedArg if (name == tree.name) && (arg eq tree.arg) => tree case _ => finalize(tree, untpd.NamedArg(name, arg)(sourceFile(tree))) - } - def Assign(tree: Tree)(lhs: Tree, rhs: 
Tree)(using Context): Assign = tree match { + def Assign(tree: Tree)(lhs: Tree, rhs: Tree)(using Context): Assign = tree match case tree: Assign if (lhs eq tree.lhs) && (rhs eq tree.rhs) => tree case _ => finalize(tree, untpd.Assign(lhs, rhs)(sourceFile(tree))) - } - def Block(tree: Tree)(stats: List[Tree], expr: Tree)(using Context): Block = tree match { + def Block(tree: Tree)(stats: List[Tree], expr: Tree)(using Context): Block = tree match case tree: Block if (stats eq tree.stats) && (expr eq tree.expr) => tree case _ => finalize(tree, untpd.Block(stats, expr)(sourceFile(tree))) - } - def If(tree: Tree)(cond: Tree, thenp: Tree, elsep: Tree)(using Context): If = tree match { + def If(tree: Tree)(cond: Tree, thenp: Tree, elsep: Tree)(using Context): If = tree match case tree: If if (cond eq tree.cond) && (thenp eq tree.thenp) && (elsep eq tree.elsep) => tree case tree: InlineIf => finalize(tree, untpd.InlineIf(cond, thenp, elsep)(sourceFile(tree))) case _ => finalize(tree, untpd.If(cond, thenp, elsep)(sourceFile(tree))) - } - def Closure(tree: Tree)(env: List[Tree], meth: Tree, tpt: Tree)(using Context): Closure = tree match { + def Closure(tree: Tree)(env: List[Tree], meth: Tree, tpt: Tree)(using Context): Closure = tree match case tree: Closure if (env eq tree.env) && (meth eq tree.meth) && (tpt eq tree.tpt) => tree case _ => finalize(tree, untpd.Closure(env, meth, tpt)(sourceFile(tree))) - } - def Match(tree: Tree)(selector: Tree, cases: List[CaseDef])(using Context): Match = tree match { + def Match(tree: Tree)(selector: Tree, cases: List[CaseDef])(using Context): Match = tree match case tree: Match if (selector eq tree.selector) && (cases eq tree.cases) => tree case tree: InlineMatch => finalize(tree, untpd.InlineMatch(selector, cases)(sourceFile(tree))) case _ => finalize(tree, untpd.Match(selector, cases)(sourceFile(tree))) - } - def CaseDef(tree: Tree)(pat: Tree, guard: Tree, body: Tree)(using Context): CaseDef = tree match { + def CaseDef(tree: 
Tree)(pat: Tree, guard: Tree, body: Tree)(using Context): CaseDef = tree match case tree: CaseDef if (pat eq tree.pat) && (guard eq tree.guard) && (body eq tree.body) => tree case _ => finalize(tree, untpd.CaseDef(pat, guard, body)(sourceFile(tree))) - } - def Labeled(tree: Tree)(bind: Bind, expr: Tree)(using Context): Labeled = tree match { + def Labeled(tree: Tree)(bind: Bind, expr: Tree)(using Context): Labeled = tree match case tree: Labeled if (bind eq tree.bind) && (expr eq tree.expr) => tree case _ => finalize(tree, untpd.Labeled(bind, expr)(sourceFile(tree))) - } - def Return(tree: Tree)(expr: Tree, from: Tree)(using Context): Return = tree match { + def Return(tree: Tree)(expr: Tree, from: Tree)(using Context): Return = tree match case tree: Return if (expr eq tree.expr) && (from eq tree.from) => tree case _ => finalize(tree, untpd.Return(expr, from)(sourceFile(tree))) - } - def WhileDo(tree: Tree)(cond: Tree, body: Tree)(using Context): WhileDo = tree match { + def WhileDo(tree: Tree)(cond: Tree, body: Tree)(using Context): WhileDo = tree match case tree: WhileDo if (cond eq tree.cond) && (body eq tree.body) => tree case _ => finalize(tree, untpd.WhileDo(cond, body)(sourceFile(tree))) - } - def Try(tree: Tree)(expr: Tree, cases: List[CaseDef], finalizer: Tree)(using Context): Try = tree match { + def Try(tree: Tree)(expr: Tree, cases: List[CaseDef], finalizer: Tree)(using Context): Try = tree match case tree: Try if (expr eq tree.expr) && (cases eq tree.cases) && (finalizer eq tree.finalizer) => tree case _ => finalize(tree, untpd.Try(expr, cases, finalizer)(sourceFile(tree))) - } - def SeqLiteral(tree: Tree)(elems: List[Tree], elemtpt: Tree)(using Context): SeqLiteral = tree match { + def SeqLiteral(tree: Tree)(elems: List[Tree], elemtpt: Tree)(using Context): SeqLiteral = tree match case tree: JavaSeqLiteral => if ((elems eq tree.elems) && (elemtpt eq tree.elemtpt)) tree else finalize(tree, untpd.JavaSeqLiteral(elems, elemtpt)) case tree: SeqLiteral if 
(elems eq tree.elems) && (elemtpt eq tree.elemtpt) => tree case _ => finalize(tree, untpd.SeqLiteral(elems, elemtpt)(sourceFile(tree))) - } - def Inlined(tree: Tree)(call: tpd.Tree, bindings: List[MemberDef], expansion: Tree)(using Context): Inlined = tree match { + def Inlined(tree: Tree)(call: tpd.Tree, bindings: List[MemberDef], expansion: Tree)(using Context): Inlined = tree match case tree: Inlined if (call eq tree.call) && (bindings eq tree.bindings) && (expansion eq tree.expansion) => tree case _ => finalize(tree, untpd.Inlined(call, bindings, expansion)(sourceFile(tree))) - } - def Quote(tree: Tree)(body: Tree, tags: List[Tree])(using Context): Quote = tree match { + def Quote(tree: Tree)(body: Tree, tags: List[Tree])(using Context): Quote = tree match case tree: Quote if (body eq tree.body) && (tags eq tree.tags) => tree case _ => finalize(tree, untpd.Quote(body, tags)(sourceFile(tree))) - } - def Splice(tree: Tree)(expr: Tree)(using Context): Splice = tree match { + def Splice(tree: Tree)(expr: Tree)(using Context): Splice = tree match case tree: Splice if (expr eq tree.expr) => tree case _ => finalize(tree, untpd.Splice(expr)(sourceFile(tree))) - } - def SplicePattern(tree: Tree)(body: Tree, args: List[Tree])(using Context): SplicePattern = tree match { + def SplicePattern(tree: Tree)(body: Tree, args: List[Tree])(using Context): SplicePattern = tree match case tree: SplicePattern if (body eq tree.body) && (args eq tree.args) => tree case _ => finalize(tree, untpd.SplicePattern(body, args)(sourceFile(tree))) - } - def SingletonTypeTree(tree: Tree)(ref: Tree)(using Context): SingletonTypeTree = tree match { + def SingletonTypeTree(tree: Tree)(ref: Tree)(using Context): SingletonTypeTree = tree match case tree: SingletonTypeTree if (ref eq tree.ref) => tree case _ => finalize(tree, untpd.SingletonTypeTree(ref)(sourceFile(tree))) - } - def RefinedTypeTree(tree: Tree)(tpt: Tree, refinements: List[Tree])(using Context): RefinedTypeTree = tree match { + def 
RefinedTypeTree(tree: Tree)(tpt: Tree, refinements: List[Tree])(using Context): RefinedTypeTree = tree match case tree: RefinedTypeTree if (tpt eq tree.tpt) && (refinements eq tree.refinements) => tree case _ => finalize(tree, untpd.RefinedTypeTree(tpt, refinements)(sourceFile(tree))) - } - def AppliedTypeTree(tree: Tree)(tpt: Tree, args: List[Tree])(using Context): AppliedTypeTree = tree match { + def AppliedTypeTree(tree: Tree)(tpt: Tree, args: List[Tree])(using Context): AppliedTypeTree = tree match case tree: AppliedTypeTree if (tpt eq tree.tpt) && (args eq tree.args) => tree case _ => finalize(tree, untpd.AppliedTypeTree(tpt, args)(sourceFile(tree))) - } - def LambdaTypeTree(tree: Tree)(tparams: List[TypeDef], body: Tree)(using Context): LambdaTypeTree = tree match { + def LambdaTypeTree(tree: Tree)(tparams: List[TypeDef], body: Tree)(using Context): LambdaTypeTree = tree match case tree: LambdaTypeTree if (tparams eq tree.tparams) && (body eq tree.body) => tree case _ => finalize(tree, untpd.LambdaTypeTree(tparams, body)(sourceFile(tree))) - } - def TermLambdaTypeTree(tree: Tree)(params: List[ValDef], body: Tree)(using Context): TermLambdaTypeTree = tree match { + def TermLambdaTypeTree(tree: Tree)(params: List[ValDef], body: Tree)(using Context): TermLambdaTypeTree = tree match case tree: TermLambdaTypeTree if (params eq tree.params) && (body eq tree.body) => tree case _ => finalize(tree, untpd.TermLambdaTypeTree(params, body)(sourceFile(tree))) - } - def MatchTypeTree(tree: Tree)(bound: Tree, selector: Tree, cases: List[CaseDef])(using Context): MatchTypeTree = tree match { + def MatchTypeTree(tree: Tree)(bound: Tree, selector: Tree, cases: List[CaseDef])(using Context): MatchTypeTree = tree match case tree: MatchTypeTree if (bound eq tree.bound) && (selector eq tree.selector) && (cases eq tree.cases) => tree case _ => finalize(tree, untpd.MatchTypeTree(bound, selector, cases)(sourceFile(tree))) - } - def ByNameTypeTree(tree: Tree)(result: Tree)(using 
Context): ByNameTypeTree = tree match { + def ByNameTypeTree(tree: Tree)(result: Tree)(using Context): ByNameTypeTree = tree match case tree: ByNameTypeTree if (result eq tree.result) => tree case _ => finalize(tree, untpd.ByNameTypeTree(result)(sourceFile(tree))) - } - def TypeBoundsTree(tree: Tree)(lo: Tree, hi: Tree, alias: Tree)(using Context): TypeBoundsTree = tree match { + def TypeBoundsTree(tree: Tree)(lo: Tree, hi: Tree, alias: Tree)(using Context): TypeBoundsTree = tree match case tree: TypeBoundsTree if (lo eq tree.lo) && (hi eq tree.hi) && (alias eq tree.alias) => tree case _ => finalize(tree, untpd.TypeBoundsTree(lo, hi, alias)(sourceFile(tree))) - } - def Bind(tree: Tree)(name: Name, body: Tree)(using Context): Bind = tree match { + def Bind(tree: Tree)(name: Name, body: Tree)(using Context): Bind = tree match case tree: Bind if (name eq tree.name) && (body eq tree.body) => tree case _ => finalize(tree, untpd.Bind(name, body)(sourceFile(tree))) - } - def Alternative(tree: Tree)(trees: List[Tree])(using Context): Alternative = tree match { + def Alternative(tree: Tree)(trees: List[Tree])(using Context): Alternative = tree match case tree: Alternative if (trees eq tree.trees) => tree case _ => finalize(tree, untpd.Alternative(trees)(sourceFile(tree))) - } - def UnApply(tree: Tree)(fun: Tree, implicits: List[Tree], patterns: List[Tree])(using Context): UnApply = tree match { + def UnApply(tree: Tree)(fun: Tree, implicits: List[Tree], patterns: List[Tree])(using Context): UnApply = tree match case tree: UnApply if (fun eq tree.fun) && (implicits eq tree.implicits) && (patterns eq tree.patterns) => tree case _ => finalize(tree, untpd.UnApply(fun, implicits, patterns)(sourceFile(tree))) - } - def ValDef(tree: Tree)(name: TermName, tpt: Tree, rhs: LazyTree)(using Context): ValDef = tree match { + def ValDef(tree: Tree)(name: TermName, tpt: Tree, rhs: LazyTree)(using Context): ValDef = tree match case tree: ValDef if (name == tree.name) && (tpt eq tree.tpt) 
&& (rhs eq tree.unforcedRhs) => tree case _ => finalize(tree, untpd.ValDef(name, tpt, rhs)(sourceFile(tree))) - } - def DefDef(tree: Tree)(name: TermName, paramss: List[ParamClause], tpt: Tree, rhs: LazyTree)(using Context): DefDef = tree match { + def DefDef(tree: Tree)(name: TermName, paramss: List[ParamClause], tpt: Tree, rhs: LazyTree)(using Context): DefDef = tree match case tree: DefDef if (name == tree.name) && (paramss eq tree.paramss) && (tpt eq tree.tpt) && (rhs eq tree.unforcedRhs) => tree case _ => finalize(tree, untpd.DefDef(name, paramss, tpt, rhs)(sourceFile(tree))) - } - def TypeDef(tree: Tree)(name: TypeName, rhs: Tree)(using Context): TypeDef = tree match { + def TypeDef(tree: Tree)(name: TypeName, rhs: Tree)(using Context): TypeDef = tree match case tree: TypeDef if (name == tree.name) && (rhs eq tree.rhs) => tree case _ => finalize(tree, untpd.TypeDef(name, rhs)(sourceFile(tree))) - } - def Template(tree: Tree)(constr: DefDef, parents: List[Tree], derived: List[untpd.Tree], self: ValDef, body: LazyTreeList)(using Context): Template = tree match { + def Template(tree: Tree)(constr: DefDef, parents: List[Tree], derived: List[untpd.Tree], self: ValDef, body: LazyTreeList)(using Context): Template = tree match case tree: Template if (constr eq tree.constr) && (parents eq tree.parents) && (derived eq tree.derived) && (self eq tree.self) && (body eq tree.unforcedBody) => tree case tree => finalize(tree, untpd.Template(constr, parents, derived, self, body)(sourceFile(tree))) - } - def Import(tree: Tree)(expr: Tree, selectors: List[untpd.ImportSelector])(using Context): Import = tree match { + def Import(tree: Tree)(expr: Tree, selectors: List[untpd.ImportSelector])(using Context): Import = tree match case tree: Import if (expr eq tree.expr) && (selectors eq tree.selectors) => tree case _ => finalize(tree, untpd.Import(expr, selectors)(sourceFile(tree))) - } - def Export(tree: Tree)(expr: Tree, selectors: List[untpd.ImportSelector])(using Context): 
Export = tree match { + def Export(tree: Tree)(expr: Tree, selectors: List[untpd.ImportSelector])(using Context): Export = tree match case tree: Export if (expr eq tree.expr) && (selectors eq tree.selectors) => tree case _ => finalize(tree, untpd.Export(expr, selectors)(sourceFile(tree))) - } - def PackageDef(tree: Tree)(pid: RefTree, stats: List[Tree])(using Context): PackageDef = tree match { + def PackageDef(tree: Tree)(pid: RefTree, stats: List[Tree])(using Context): PackageDef = tree match case tree: PackageDef if (pid eq tree.pid) && (stats eq tree.stats) => tree case _ => finalize(tree, untpd.PackageDef(pid, stats)(sourceFile(tree))) - } - def Annotated(tree: Tree)(arg: Tree, annot: Tree)(using Context): Annotated = tree match { + def Annotated(tree: Tree)(arg: Tree, annot: Tree)(using Context): Annotated = tree match case tree: Annotated if (arg eq tree.arg) && (annot eq tree.annot) => tree case _ => finalize(tree, untpd.Annotated(arg, annot)(sourceFile(tree))) - } - def Thicket(tree: Tree)(trees: List[Tree])(using Context): Thicket = tree match { + def Thicket(tree: Tree)(trees: List[Tree])(using Context): Thicket = tree match case tree: Thicket if (trees eq tree.trees) => tree case _ => finalize(tree, untpd.Thicket(trees)(sourceFile(tree))) - } - def Hole(tree: Tree)(isTerm: Boolean, idx: Int, args: List[Tree], content: Tree)(using Context): Hole = tree match { + def Hole(tree: Tree)(isTerm: Boolean, idx: Int, args: List[Tree], content: Tree)(using Context): Hole = tree match case tree: Hole if isTerm == tree.isTerm && idx == tree.idx && args.eq(tree.args) && content.eq(tree.content) && content.eq(tree.content) => tree case _ => finalize(tree, untpd.Hole(isTerm, idx, args, content)(sourceFile(tree))) - } // Copier methods with default arguments; these demand that the original tree // is of the same class as the copy. We only include trees with more than 2 elements here. 
@@ -1454,7 +1315,6 @@ object Trees { def Hole(tree: Hole)(isTerm: Boolean = tree.isTerm, idx: Int = tree.idx, args: List[Tree] = tree.args, content: Tree = tree.content)(using Context): Hole = Hole(tree: Tree)(isTerm, idx, args, content) - } /** Hook to indicate that a transform of some subtree should be skipped */ protected def skipTransform(tree: Tree)(using Context): Boolean = false @@ -1487,11 +1347,11 @@ object Trees { sourced abstract class TreeMap(val cpy: TreeCopier = inst.cpy) { self => - def transform(tree: Tree)(using Context): Tree = { - inContext(transformCtx(tree)) { + def transform(tree: Tree)(using Context): Tree = + inContext(transformCtx(tree)): Stats.record(s"TreeMap.transform/$getClass") if (skipTransform(tree)) tree - else tree match { + else tree match case Ident(name) => tree case Select(qualifier, name) => @@ -1593,9 +1453,6 @@ object Trees { cpy.Hole(tree)(isTerm, idx, transform(args), transform(content)) case _ => transformMoreCases(tree) - } - } - } def transformStats(trees: List[Tree], exprOwner: Symbol)(using Context): List[Tree] = transform(trees) @@ -1612,10 +1469,9 @@ object Trees { def transformParamss(paramss: List[ParamClause])(using Context): List[ParamClause] = paramss.mapConserve(transformParams) - protected def transformMoreCases(tree: Tree)(using Context): Tree = { + protected def transformMoreCases(tree: Tree)(using Context): Tree = assert(ctx.reporter.errorsReported) tree - } } abstract class TreeAccumulator[X] { self => @@ -1631,9 +1487,9 @@ object Trees { def foldOver(x: X, tree: Tree)(using Context): X = if (tree.source != ctx.source && tree.source.exists) foldOver(x, tree)(using ctx.withSource(tree.source)) - else { + else Stats.record(s"TreeAccumulator.foldOver/$getClass") - tree match { + tree match case Ident(name) => x case Select(qualifier, name) => @@ -1687,13 +1543,11 @@ object Trees { case AppliedTypeTree(tpt, args) => this(this(x, tpt), args) case LambdaTypeTree(tparams, body) => - inContext(localCtx(tree)) { + 
inContext(localCtx(tree)): this(this(x, tparams), body) - } case TermLambdaTypeTree(params, body) => - inContext(localCtx(tree)) { + inContext(localCtx(tree)): this(this(x, params), body) - } case MatchTypeTree(bound, selector, cases) => this(this(this(x, bound), selector), cases) case ByNameTypeTree(result) => @@ -1707,17 +1561,14 @@ object Trees { case UnApply(fun, implicits, patterns) => this(this(this(x, fun), implicits), patterns) case tree @ ValDef(_, tpt, _) => - inContext(localCtx(tree)) { + inContext(localCtx(tree)): this(this(x, tpt), tree.rhs) - } case tree @ DefDef(_, paramss, tpt, _) => - inContext(localCtx(tree)) { + inContext(localCtx(tree)): this(this(paramss.foldLeft(x)(apply), tpt), tree.rhs) - } case TypeDef(_, rhs) => - inContext(localCtx(tree)) { + inContext(localCtx(tree)): this(x, rhs) - } case tree @ Template(constr, _, self, _) if tree.derived.isEmpty => this(this(this(this(x, constr), tree.parents), self), tree.body) case Import(expr, _) => @@ -1740,10 +1591,8 @@ object Trees { this(this(x, args), content) case _ => foldMoreCases(x, tree) - } - } - def foldMoreCases(x: X, tree: Tree)(using Context): X = { + def foldMoreCases(x: X, tree: Tree)(using Context): X = assert(ctx.reporter.hasUnreportedErrors || ctx.reporter.errorsReported || ctx.mode.is(Mode.Interactive), tree) @@ -1752,41 +1601,35 @@ object Trees { // The IDE can still traverse inside such trees, either in the run where errors // are reported, or in subsequent ones. 
x - } } - abstract class TreeTraverser extends TreeAccumulator[Unit] { + abstract class TreeTraverser extends TreeAccumulator[Unit]: def traverse(tree: Tree)(using Context): Unit def traverse(trees: List[Tree])(using Context) = apply((), trees) def apply(x: Unit, tree: Tree)(using Context): Unit = traverse(tree) protected def traverseChildren(tree: Tree)(using Context): Unit = foldOver((), tree) - } /** Fold `f` over all tree nodes, in depth-first, prefix order */ - class DeepFolder[X](f: (X, Tree) => X) extends TreeAccumulator[X] { + class DeepFolder[X](f: (X, Tree) => X) extends TreeAccumulator[X]: def apply(x: X, tree: Tree)(using Context): X = foldOver(f(x, tree), tree) - } /** Fold `f` over all tree nodes, in depth-first, prefix order, but don't visit * subtrees where `f` returns a different result for the root, i.e. `f(x, root) ne x`. */ - class ShallowFolder[X](f: (X, Tree) => X) extends TreeAccumulator[X] { - def apply(x: X, tree: Tree)(using Context): X = { + class ShallowFolder[X](f: (X, Tree) => X) extends TreeAccumulator[X]: + def apply(x: X, tree: Tree)(using Context): X = val x1 = f(x, tree) if (x1.asInstanceOf[AnyRef] ne x.asInstanceOf[AnyRef]) x1 else foldOver(x1, tree) - } - } def rename(tree: NameTree, newName: Name)(using Context): tree.ThisTree[T] = { - tree match { + tree match case tree: Ident => cpy.Ident(tree)(newName) case tree: Select => cpy.Select(tree)(tree.qualifier, newName) case tree: Bind => cpy.Bind(tree)(newName, tree.body) case tree: ValDef => cpy.ValDef(tree)(name = newName.asTermName) case tree: DefDef => cpy.DefDef(tree)(name = newName.asTermName) case tree: TypeDef => cpy.TypeDef(tree)(name = newName.asTypeName) - } }.asInstanceOf[tree.ThisTree[T]] object TypeDefs: @@ -1842,7 +1685,7 @@ object Trees { */ def applyOverloaded( receiver: tpd.Tree, method: TermName, args: List[Tree], targs: List[Type], - expectedType: Type)(using parentCtx: Context): tpd.Tree = { + expectedType: Type)(using parentCtx: Context): tpd.Tree = given 
ctx: Context = parentCtx.retractMode(Mode.ImplicitsEnabled) import dotty.tools.dotc.ast.tpd.TreeOps @@ -1853,11 +1696,10 @@ object Trees { overload.println(i"members = ${receiver.tpe.decls}") report.error(em"no member $receiver . $method", receiver.srcPos) val selected = - if (denot.isOverloaded) { - def typeParamCount(tp: Type) = tp.widen match { + if (denot.isOverloaded) + def typeParamCount(tp: Type) = tp.widen match case tp: PolyType => tp.paramInfos.length case _ => 0 - } val allAlts = denot.alternatives .map(denot => TermRef(receiver.tpe, denot.symbol)) .filter(tr => typeParamCount(tr) == targs.length) @@ -1872,20 +1714,15 @@ object Trees { i"all alternatives: ${allAlts.map(_.symbol.showDcl).mkString(", ")}\n" + i"matching alternatives: ${alternatives.map(_.symbol.showDcl).mkString(", ")}.") // this is parsed from bytecode tree. there's nothing user can do about it alternatives.head - } else TermRef(receiver.tpe, denot.symbol) val fun = receiver.select(selected).appliedToTypes(targs) val apply = untpd.Apply(fun, args) typer.ApplyTo(apply, fun, selected, proto, expectedType) - } - def resolveConstructor(atp: Type, args: List[Tree])(using Context): tpd.Tree = { + def resolveConstructor(atp: Type, args: List[Tree])(using Context): tpd.Tree = val targs = atp.argTypes - withoutMode(Mode.PatternOrTypeBits) { + withoutMode(Mode.PatternOrTypeBits): applyOverloaded(tpd.New(atp.typeConstructor), nme.CONSTRUCTOR, args, targs, atp) - } - } } -} diff --git a/compiler/src/dotty/tools/dotc/ast/tpd.scala b/compiler/src/dotty/tools/dotc/ast/tpd.scala index 76e16cc00a90..0c0ae09306a7 100644 --- a/compiler/src/dotty/tools/dotc/ast/tpd.scala +++ b/compiler/src/dotty/tools/dotc/ast/tpd.scala @@ -20,7 +20,7 @@ import scala.annotation.tailrec import scala.collection.mutable.ListBuffer /** Some creators for typed trees */ -object tpd extends Trees.Instance[Type] with TypedTreeInfo { +object tpd extends Trees.Instance[Type] with TypedTreeInfo: private def ta(using Context) = 
ctx.typeAssigner @@ -89,14 +89,13 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { /** Join `stats` in front of `expr` creating a new block if necessary */ def seq(stats: List[Tree], expr: Tree)(using Context): Tree = if (stats.isEmpty) expr - else expr match { + else expr match case Block(_, _: Closure) => Block(stats, expr) // leave closures in their own block case Block(estats, eexpr) => cpy.Block(expr)(stats ::: estats, eexpr).withType(ta.avoidingType(eexpr, stats)) case _ => Block(stats, expr) - } def If(cond: Tree, thenp: Tree, elsep: Tree)(using Context): If = ta.assignType(untpd.If(cond, thenp, elsep), thenp, elsep) @@ -118,7 +117,7 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { * where the closure's type is the target type of the expression (FunctionN, unless * otherwise specified). */ - def Closure(meth: TermSymbol, rhsFn: List[List[Tree]] => Tree, targs: List[Tree] = Nil, targetType: Type = NoType)(using Context): Block = { + def Closure(meth: TermSymbol, rhsFn: List[List[Tree]] => Tree, targs: List[Tree] = Nil, targetType: Type = NoType)(using Context): Block = val targetTpt = if (targetType.exists) TypeTree(targetType) else EmptyTree val call = if (targs.isEmpty) Ident(TermRef(NoPrefix, meth)) @@ -126,13 +125,11 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { Block( DefDef(meth, rhsFn) :: Nil, Closure(Nil, call, targetTpt)) - } /** A closure whose anonymous function has the given method type */ - def Lambda(tpe: MethodType, rhsFn: List[Tree] => Tree)(using Context): Block = { + def Lambda(tpe: MethodType, rhsFn: List[Tree] => Tree)(using Context): Block = val meth = newAnonFun(ctx.owner, tpe) Closure(meth, tss => rhsFn(tss.head).changeOwner(ctx.owner, meth)) - } def CaseDef(pat: Tree, guard: Tree, body: Tree)(using Context): CaseDef = ta.assignType(untpd.CaseDef(pat, guard, body), pat, body) @@ -213,10 +210,9 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { def Alternative(trees: 
List[Tree])(using Context): Alternative = ta.assignType(untpd.Alternative(trees), trees) - def UnApply(fun: Tree, implicits: List[Tree], patterns: List[Tree], proto: Type)(using Context): UnApply = { + def UnApply(fun: Tree, implicits: List[Tree], patterns: List[Tree], proto: Type)(using Context): UnApply = assert(fun.isInstanceOf[RefTree] || fun.isInstanceOf[GenericApply]) ta.assignType(untpd.UnApply(fun, implicits, patterns), proto) - } def ValDef(sym: TermSymbol, rhs: LazyTree = EmptyTree, inferred: Boolean = false)(using Context): ValDef = ta.assignType(untpd.ValDef(sym.name, TypeTree(sym.info, inferred), rhs), sym) @@ -307,26 +303,23 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { def TypeDef(sym: TypeSymbol)(using Context): TypeDef = ta.assignType(untpd.TypeDef(sym.name, TypeTree(sym.info)), sym) - def ClassDef(cls: ClassSymbol, constr: DefDef, body: List[Tree], superArgs: List[Tree] = Nil)(using Context): TypeDef = { + def ClassDef(cls: ClassSymbol, constr: DefDef, body: List[Tree], superArgs: List[Tree] = Nil)(using Context): TypeDef = val firstParent :: otherParents = cls.info.parents: @unchecked val superRef = if (cls.is(Trait)) TypeTree(firstParent) - else { - def isApplicable(ctpe: Type): Boolean = ctpe match { + else + def isApplicable(ctpe: Type): Boolean = ctpe match case ctpe: PolyType => isApplicable(ctpe.instantiate(firstParent.argTypes)) case ctpe: MethodType => (superArgs corresponds ctpe.paramInfos)(_.tpe <:< _) case _ => false - } val constr = firstParent.decl(nme.CONSTRUCTOR).suchThat(constr => isApplicable(constr.info)) New(firstParent, constr.symbol.asTerm, superArgs) - } ClassDefWithParents(cls, constr, superRef :: otherParents.map(TypeTree(_)), body) - } - def ClassDefWithParents(cls: ClassSymbol, constr: DefDef, parents: List[Tree], body: List[Tree])(using Context): TypeDef = { + def ClassDefWithParents(cls: ClassSymbol, constr: DefDef, parents: List[Tree], body: List[Tree])(using Context): TypeDef = val selfType = if 
(cls.classInfo.selfInfo ne NoType) ValDef(newSelfSym(cls)) else EmptyValDef @@ -342,7 +335,6 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { val impl = untpd.Template(constr, parents, Nil, selfType, newTypeParams ++ body) .withType(localDummy.termRef) ta.assignType(untpd.TypeDef(cls.name, impl), cls) - } /** An anonymous class * @@ -354,18 +346,16 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { * The class has the same owner as the first function in `fns`. * Its position is the union of all functions in `fns`. */ - def AnonClass(parents: List[Type], fns: List[TermSymbol], methNames: List[TermName])(using Context): Block = { + def AnonClass(parents: List[Type], fns: List[TermSymbol], methNames: List[TermName])(using Context): Block = AnonClass(fns.head.owner, parents, fns.map(_.span).reduceLeft(_ union _)) { cls => - def forwarder(fn: TermSymbol, name: TermName) = { + def forwarder(fn: TermSymbol, name: TermName) = val fwdMeth = fn.copy(cls, name, Synthetic | Method | Final).entered.asTerm for overridden <- fwdMeth.allOverriddenSymbols do if overridden.is(Extension) then fwdMeth.setFlag(Extension) if !overridden.is(Deferred) then fwdMeth.setFlag(Override) DefDef(fwdMeth, ref(fn).appliedToArgss(_)) - } fns.lazyZip(methNames).map(forwarder) } - } /** An anonymous class * @@ -375,10 +365,9 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { */ def AnonClass(owner: Symbol, parents: List[Type], coord: Coord)(body: ClassSymbol => List[Tree])(using Context): Block = val parents1 = - if (parents.head.classSymbol.is(Trait)) { + if (parents.head.classSymbol.is(Trait)) val head = parents.head.parents.head if (head.isRef(defn.AnyClass)) defn.AnyRefType :: parents else head :: parents - } else parents val cls = newNormalizedClassSymbol(owner, tpnme.ANON_CLASS, Synthetic | Final, parents1, coord = coord) val constr = newConstructor(cls, Synthetic, Nil, Nil).entered @@ -402,8 +391,8 @@ object tpd extends Trees.Instance[Type] with 
TypedTreeInfo { // ------ Making references ------------------------------------------------------ - def prefixIsElidable(tp: NamedType)(using Context): Boolean = { - val typeIsElidable = tp.prefix match { + def prefixIsElidable(tp: NamedType)(using Context): Boolean = + val typeIsElidable = tp.prefix match case pre: ThisType => tp.isType || pre.cls.isStaticOwner || @@ -415,16 +404,13 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { pre.symbol.is(Module) && pre.symbol.isStatic case pre => pre `eq` NoPrefix - } typeIsElidable || tp.symbol.is(JavaStatic) || tp.symbol.hasAnnotation(defn.ScalaStaticAnnot) - } - def needsSelect(tp: Type)(using Context): Boolean = tp match { + def needsSelect(tp: Type)(using Context): Boolean = tp match case tp: TermRef => !prefixIsElidable(tp) case _ => false - } def needsIdent(tp: Type)(using Context): Boolean = tp match case tp: TermRef => tp.prefix eq NoPrefix @@ -450,66 +436,57 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { def ref(sym: Symbol)(using Context): Tree = ref(NamedType(sym.owner.thisType, sym.name, sym.denot)) - private def followOuterLinks(t: Tree)(using Context) = t match { + private def followOuterLinks(t: Tree)(using Context) = t match case t: This if ctx.erasedTypes && !(t.symbol == ctx.owner.enclosingClass || t.symbol.isStaticOwner) => // after erasure outer paths should be respected ExplicitOuter.OuterOps(ctx).path(toCls = t.tpe.classSymbol) case t => t - } - def singleton(tp: Type, needLoad: Boolean = true)(using Context): Tree = tp.dealias match { + def singleton(tp: Type, needLoad: Boolean = true)(using Context): Tree = tp.dealias match case tp: TermRef => ref(tp, needLoad) case tp: ThisType => This(tp.cls) case tp: SkolemType => singleton(tp.narrow, needLoad) case SuperType(qual, _) => singleton(qual, needLoad) case ConstantType(value) => Literal(value) - } /** A path that corresponds to the given type `tp`. Error if `tp` is not a refinement * of an addressable singleton type. 
*/ - def pathFor(tp: Type)(using Context): Tree = { - def recur(tp: Type): Tree = tp match { + def pathFor(tp: Type)(using Context): Tree = + def recur(tp: Type): Tree = tp match case tp: NamedType => - tp.info match { + tp.info match case TypeAlias(alias) => recur(alias) case _: TypeBounds => EmptyTree case _ => singleton(tp) - } case tp: TypeProxy => recur(tp.superType) case _ => EmptyTree - } - recur(tp).orElse { + recur(tp).orElse: report.error(em"$tp is not an addressable singleton type") TypeTree(tp) - } - } /** A tree representing a `newXYZArray` operation of the right * kind for the given element type in `elemTpe`. No type arguments or * `length` arguments are given. */ - def newArray(elemTpe: Type, returnTpe: Type, span: Span, dims: JavaSeqLiteral)(using Context): Tree = { + def newArray(elemTpe: Type, returnTpe: Type, span: Span, dims: JavaSeqLiteral)(using Context): Tree = val elemClass = elemTpe.classSymbol def newArr = ref(defn.DottyArraysModule).select(defn.newArrayMethod).withSpan(span) - if (!ctx.erasedTypes) { + if (!ctx.erasedTypes) assert(!TypeErasure.isGeneric(elemTpe), elemTpe) //needs to be done during typer. See Applications.convertNewGenericArray newArr.appliedToTypeTrees(TypeTree(returnTpe) :: Nil).appliedToTermArgs(clsOf(elemTpe) :: clsOf(returnTpe) :: dims :: Nil).withSpan(span) - } else // after erasure newArr.appliedToTermArgs(clsOf(elemTpe) :: clsOf(returnTpe) :: dims :: Nil).withSpan(span) - } /** The wrapped array method name for an array of type elemtp */ - def wrapArrayMethodName(elemtp: Type)(using Context): TermName = { + def wrapArrayMethodName(elemtp: Type)(using Context): TermName = val elemCls = elemtp.classSymbol if (elemCls.isPrimitiveValueClass) nme.wrapXArray(elemCls.name) else if (elemCls.derivesFrom(defn.ObjectClass) && !elemCls.isNotRuntimeClass) nme.wrapRefArray else nme.genericWrapArray - } /** A tree representing a `wrapXYZArray(tree)` operation of the right * kind for the given element type in `elemTpe`. 
@@ -528,14 +505,13 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { New(tp, tp.dealias.typeSymbol.primaryConstructor.asTerm, args) /** new C(args), calling given constructor `constr` of C */ - def New(tp: Type, constr: TermSymbol, args: List[Tree])(using Context): Apply = { + def New(tp: Type, constr: TermSymbol, args: List[Tree])(using Context): Apply = val targs = tp.argTypes val tycon = tp.typeConstructor New(tycon) .select(TermRef(tycon, constr)) .appliedToTypes(targs) .appliedToTermArgs(args) - } /** An object def * @@ -563,19 +539,18 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { * the RHS of a method contains a class owned by the method, this would be * an error.) */ - def ModuleDef(sym: TermSymbol, body: List[Tree])(using Context): tpd.Thicket = { + def ModuleDef(sym: TermSymbol, body: List[Tree])(using Context): tpd.Thicket = val modcls = sym.moduleClass.asClass val constrSym = modcls.primaryConstructor orElse newDefaultConstructor(modcls).entered val constr = DefDef(constrSym.asTerm, EmptyTree) val clsdef = ClassDef(modcls, constr, body) val valdef = ValDef(sym, New(modcls.typeRef).select(constrSym).appliedToNone) Thicket(valdef, clsdef) - } /** A `_` with given type */ def Underscore(tp: Type)(using Context): Ident = untpd.Ident(nme.WILDCARD).withType(tp) - def defaultValue(tpe: Type)(using Context): Tree = { + def defaultValue(tpe: Type)(using Context): Tree = val tpw = tpe.widen if (tpw isRef defn.IntClass) Literal(Constant(0)) @@ -587,19 +562,16 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { else if (tpw isRef defn.ByteClass) Literal(Constant(0.toByte)) else if (tpw isRef defn.ShortClass) Literal(Constant(0.toShort)) else nullLiteral.select(defn.Any_asInstanceOf).appliedToType(tpe) - } - private class FindLocalDummyAccumulator(cls: ClassSymbol)(using Context) extends TreeAccumulator[Symbol] { + private class FindLocalDummyAccumulator(cls: ClassSymbol)(using Context) extends TreeAccumulator[Symbol]: def 
apply(sym: Symbol, tree: Tree)(using Context) = if (sym.exists) sym - else if (tree.isDef) { + else if (tree.isDef) val owner = tree.symbol.owner if (owner.isLocalDummy && owner.owner == cls) owner else if (owner == cls) foldOver(sym, tree) else sym - } else foldOver(sym, tree) - } /** The owner to be used in a local context when traversing a tree */ def localOwner(tree: Tree)(using Context): Symbol = @@ -614,7 +586,7 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { val cpyBetweenPhases: TimeTravellingTreeCopier = TimeTravellingTreeCopier() - class TypedTreeCopier extends TreeCopier { + class TypedTreeCopier extends TreeCopier: def postProcess(tree: Tree, copied: untpd.Tree): copied.ThisTree[Type] = copied.withTypeUnchecked(tree.tpe) def postProcess(tree: Tree, copied: untpd.MemberDef): copied.ThisTree[Type] = @@ -622,42 +594,35 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { protected val untpdCpy = untpd.cpy - override def Select(tree: Tree)(qualifier: Tree, name: Name)(using Context): Select = { + override def Select(tree: Tree)(qualifier: Tree, name: Name)(using Context): Select = val tree1 = untpdCpy.Select(tree)(qualifier, name) - tree match { + tree match case tree: Select if qualifier.tpe eq tree.qualifier.tpe => tree1.withTypeUnchecked(tree.tpe) case _ => - val tree2: Select = tree.tpe match { + val tree2: Select = tree.tpe match case tpe: NamedType => val qualType = qualifier.tpe.widenIfUnstable if qualType.isExactlyNothing then tree1.withTypeUnchecked(tree.tpe) else tree1.withType(tpe.derivedSelect(qualType)) case _ => tree1.withTypeUnchecked(tree.tpe) - } ConstFold.Select(tree2) - } - } - override def Apply(tree: Tree)(fun: Tree, args: List[Tree])(using Context): Apply = { + override def Apply(tree: Tree)(fun: Tree, args: List[Tree])(using Context): Apply = val tree1 = untpdCpy.Apply(tree)(fun, args) - tree match { + tree match case tree: Apply if (fun.tpe eq tree.fun.tpe) && sameTypes(args, tree.args) => 
tree1.withTypeUnchecked(tree.tpe) case _ => ta.assignType(tree1, fun, args) - } - } - override def TypeApply(tree: Tree)(fun: Tree, args: List[Tree])(using Context): TypeApply = { + override def TypeApply(tree: Tree)(fun: Tree, args: List[Tree])(using Context): TypeApply = val tree1 = untpdCpy.TypeApply(tree)(fun, args) - tree match { + tree match case tree: TypeApply if (fun.tpe eq tree.fun.tpe) && sameTypes(args, tree.args) => tree1.withTypeUnchecked(tree.tpe) case _ => ta.assignType(tree1, fun, args) - } - } override def Literal(tree: Tree)(const: Constant)(using Context): Literal = ta.assignType(untpdCpy.Literal(tree)(const)) @@ -674,9 +639,9 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { override def Assign(tree: Tree)(lhs: Tree, rhs: Tree)(using Context): Assign = ta.assignType(untpdCpy.Assign(tree)(lhs, rhs)) - override def Block(tree: Tree)(stats: List[Tree], expr: Tree)(using Context): Block = { + override def Block(tree: Tree)(stats: List[Tree], expr: Tree)(using Context): Block = val tree1 = untpdCpy.Block(tree)(stats, expr) - tree match { + tree match case tree: Block if (expr.tpe eq tree.expr.tpe) && (expr.tpe eq tree.tpe) => // The last guard is a conservative check: if `tree.tpe` is different from `expr.tpe`, then // it was computed from widening `expr.tpe`, and tree transforms might cause `expr.tpe.widen` @@ -686,12 +651,10 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { // will still be `TermRef(NoPrefix, s)` tree1.withTypeUnchecked(tree.tpe) case _ => ta.assignType(tree1, stats, expr) - } - } - override def If(tree: Tree)(cond: Tree, thenp: Tree, elsep: Tree)(using Context): If = { + override def If(tree: Tree)(cond: Tree, thenp: Tree, elsep: Tree)(using Context): If = val tree1 = untpdCpy.If(tree)(cond, thenp, elsep) - tree match { + tree match case tree: If if (thenp.tpe eq tree.thenp.tpe) && (elsep.tpe eq tree.elsep.tpe) && ((tree.tpe eq thenp.tpe) || (tree.tpe eq elsep.tpe)) => // The last guard is a 
conservative check similar to the one done in `Block` above, @@ -700,33 +663,25 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { // in `Block` applies. tree1.withTypeUnchecked(tree.tpe) case _ => ta.assignType(tree1, thenp, elsep) - } - } - override def Closure(tree: Tree)(env: List[Tree], meth: Tree, tpt: Tree)(using Context): Closure = { + override def Closure(tree: Tree)(env: List[Tree], meth: Tree, tpt: Tree)(using Context): Closure = val tree1 = untpdCpy.Closure(tree)(env, meth, tpt) - tree match { + tree match case tree: Closure if sameTypes(env, tree.env) && (meth.tpe eq tree.meth.tpe) && (tpt.tpe eq tree.tpt.tpe) => tree1.withTypeUnchecked(tree.tpe) case _ => ta.assignType(tree1, meth, tpt) - } - } - override def Match(tree: Tree)(selector: Tree, cases: List[CaseDef])(using Context): Match = { + override def Match(tree: Tree)(selector: Tree, cases: List[CaseDef])(using Context): Match = val tree1 = untpdCpy.Match(tree)(selector, cases) - tree match { + tree match case tree: Match if sameTypes(cases, tree.cases) => tree1.withTypeUnchecked(tree.tpe) case _ => ta.assignType(tree1, selector, cases) - } - } - override def CaseDef(tree: Tree)(pat: Tree, guard: Tree, body: Tree)(using Context): CaseDef = { + override def CaseDef(tree: Tree)(pat: Tree, guard: Tree, body: Tree)(using Context): CaseDef = val tree1 = untpdCpy.CaseDef(tree)(pat, guard, body) - tree match { + tree match case tree: CaseDef if body.tpe eq tree.body.tpe => tree1.withTypeUnchecked(tree.tpe) case _ => ta.assignType(tree1, pat, body) - } - } override def Labeled(tree: Tree)(bind: Bind, expr: Tree)(using Context): Labeled = ta.assignType(untpdCpy.Labeled(tree)(bind, expr)) @@ -737,41 +692,33 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { override def WhileDo(tree: Tree)(cond: Tree, body: Tree)(using Context): WhileDo = ta.assignType(untpdCpy.WhileDo(tree)(cond, body)) - override def Try(tree: Tree)(expr: Tree, cases: List[CaseDef], finalizer: Tree)(using 
Context): Try = { + override def Try(tree: Tree)(expr: Tree, cases: List[CaseDef], finalizer: Tree)(using Context): Try = val tree1 = untpdCpy.Try(tree)(expr, cases, finalizer) - tree match { + tree match case tree: Try if (expr.tpe eq tree.expr.tpe) && sameTypes(cases, tree.cases) => tree1.withTypeUnchecked(tree.tpe) case _ => ta.assignType(tree1, expr, cases) - } - } - override def Inlined(tree: Tree)(call: Tree, bindings: List[MemberDef], expansion: Tree)(using Context): Inlined = { + override def Inlined(tree: Tree)(call: Tree, bindings: List[MemberDef], expansion: Tree)(using Context): Inlined = val tree1 = untpdCpy.Inlined(tree)(call, bindings, expansion) - tree match { + tree match case tree: Inlined if sameTypes(bindings, tree.bindings) && (expansion.tpe eq tree.expansion.tpe) => tree1.withTypeUnchecked(tree.tpe) case _ => ta.assignType(tree1, bindings, expansion) - } - } - override def SeqLiteral(tree: Tree)(elems: List[Tree], elemtpt: Tree)(using Context): SeqLiteral = { + override def SeqLiteral(tree: Tree)(elems: List[Tree], elemtpt: Tree)(using Context): SeqLiteral = val tree1 = untpdCpy.SeqLiteral(tree)(elems, elemtpt) - tree match { + tree match case tree: SeqLiteral if sameTypes(elems, tree.elems) && (elemtpt.tpe eq tree.elemtpt.tpe) => tree1.withTypeUnchecked(tree.tpe) case _ => ta.assignType(tree1, elems, elemtpt) - } - } - override def Annotated(tree: Tree)(arg: Tree, annot: Tree)(using Context): Annotated = { + override def Annotated(tree: Tree)(arg: Tree, annot: Tree)(using Context): Annotated = val tree1 = untpdCpy.Annotated(tree)(arg, annot) - tree match { + tree match case tree: Annotated if (arg.tpe eq tree.arg.tpe) && (annot eq tree.annot) => tree1.withTypeUnchecked(tree.tpe) case _ => ta.assignType(tree1, arg, annot) - } - } override def If(tree: If)(cond: Tree = tree.cond, thenp: Tree = tree.thenp, elsep: Tree = tree.elsep)(using Context): If = If(tree: Tree)(cond, thenp, elsep) @@ -781,9 +728,8 @@ object tpd extends Trees.Instance[Type] 
with TypedTreeInfo { CaseDef(tree: Tree)(pat, guard, body) override def Try(tree: Try)(expr: Tree = tree.expr, cases: List[CaseDef] = tree.cases, finalizer: Tree = tree.finalizer)(using Context): Try = Try(tree: Tree)(expr, cases, finalizer) - } - class TimeTravellingTreeCopier extends TypedTreeCopier { + class TimeTravellingTreeCopier extends TypedTreeCopier: override def Apply(tree: Tree)(fun: Tree, args: List[Tree])(using Context): Apply = tree match case tree: Apply @@ -808,11 +754,10 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { override def Closure(tree: Closure)(env: List[Tree] = tree.env, meth: Tree = tree.meth, tpt: Tree = tree.tpt)(using Context): Closure = Closure(tree: Tree)(env, meth, tpt) - } override def skipTransform(tree: Tree)(using Context): Boolean = tree.tpe.isError - implicit class TreeOps[ThisTree <: tpd.Tree](private val tree: ThisTree) extends AnyVal { + implicit class TreeOps[ThisTree <: tpd.Tree](private val tree: ThisTree) extends AnyVal: def isValue(using Context): Boolean = tree.isTerm && tree.tpe.widen.isValueType @@ -823,10 +768,9 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { def isValueType: Boolean = tree.isType && tree.tpe.isValueType - def isInstantiation: Boolean = tree match { + def isInstantiation: Boolean = tree match case Apply(Select(New(_), nme.CONSTRUCTOR), _) => true case _ => false - } def shallowFold[T](z: T)(op: (T, tpd.Tree) => T)(using Context): T = ShallowFolder(op).apply(z, tree) @@ -843,7 +787,7 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { /** Change owner from `from` to `to`. If `from` is a weak owner, also change its * owner to `to`, and continue until a non-weak owner is reached. 
*/ - def changeOwner(from: Symbol, to: Symbol)(using Context): ThisTree = { + def changeOwner(from: Symbol, to: Symbol)(using Context): ThisTree = @tailrec def loop(from: Symbol, froms: List[Symbol], tos: List[Symbol]): ThisTree = if (from.isWeakOwner && !from.owner.isClass) loop(from.owner, from :: froms, to :: tos) @@ -851,50 +795,42 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { //println(i"change owner ${from :: froms}%, % ==> $tos of $tree") TreeTypeMap(oldOwners = from :: froms, newOwners = tos).apply(tree) if (from == to) tree else loop(from, Nil, to :: Nil) - } /** * Set the owner of every definition in this tree which is not itself contained in this * tree to be `newowner` */ - def changeNonLocalOwners(newOwner: Symbol)(using Context): Tree = { - val ownerAcc = new TreeAccumulator[immutable.Set[Symbol]] { - def apply(ss: immutable.Set[Symbol], tree: Tree)(using Context) = tree match { + def changeNonLocalOwners(newOwner: Symbol)(using Context): Tree = + val ownerAcc = new TreeAccumulator[immutable.Set[Symbol]]: + def apply(ss: immutable.Set[Symbol], tree: Tree)(using Context) = tree match case tree: DefTree => val sym = tree.symbol if sym.exists && !sym.owner.is(Package) then ss + sym.owner else ss case _ => foldOver(ss, tree) - } - } val owners = ownerAcc(immutable.Set.empty[Symbol], tree).toList val newOwners = List.fill(owners.size)(newOwner) TreeTypeMap(oldOwners = owners, newOwners = newOwners).apply(tree) - } /** After phase `trans`, set the owner of every definition in this tree that was formerly * owned by `from` to `to`. 
*/ def changeOwnerAfter(from: Symbol, to: Symbol, trans: DenotTransformer)(using Context): ThisTree = - if (ctx.phase == trans.next) { - val traverser = new TreeTraverser { - def traverse(tree: Tree)(using Context) = tree match { + if (ctx.phase == trans.next) + val traverser = new TreeTraverser: + def traverse(tree: Tree)(using Context) = tree match case tree: DefTree => val sym = tree.symbol val prevDenot = atPhase(trans)(sym.denot) - if (prevDenot.effectiveOwner == from.skipWeakOwner) { + if (prevDenot.effectiveOwner == from.skipWeakOwner) val d = sym.copySymDenotation(owner = to) d.installAfter(trans) d.transformAfter(trans, d => if (d.owner eq from) d.copySymDenotation(owner = to) else d) - } if (sym.isWeakOwner) traverseChildren(tree) case _ => traverseChildren(tree) - } - } traverser.traverse(tree) tree - } else atPhase(trans.next)(changeOwnerAfter(from, to, trans)) /** A select node with the given selector name and a computed type */ @@ -915,16 +851,14 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { * is in fact the symbol you would get when you select with the symbol's name, * otherwise a data race may occur which would be flagged by -Yno-double-bindings. 
*/ - def select(sym: Symbol)(using Context): Select = { + def select(sym: Symbol)(using Context): Select = val tp = - if (sym.isType) { + if (sym.isType) assert(!sym.is(TypeParam)) TypeRef(tree.tpe, sym.asType) - } else TermRef(tree.tpe, sym.name.asTermName, sym.denot.asSeenFrom(tree.tpe)) untpd.Select(tree, sym.name).withType(tp) - } /** A select node with the given selector name and signature and a computed type */ def selectWithSig(name: Name, sig: Signature, target: Name)(using Context): Tree = @@ -996,7 +930,7 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { applyOverloaded(tree, nme.EQ, that :: Nil, Nil, defn.BooleanType) /** `tree.isInstanceOf[tp]`, with special treatment of singleton types */ - def isInstance(tp: Type)(using Context): Tree = tp.dealias match { + def isInstance(tp: Type)(using Context): Tree = tp.dealias match case ConstantType(c) if c.tag == StringTag => singleton(tp).equal(tree) case tp: SingletonType => @@ -1006,13 +940,11 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { singleton(tp).equal(tree) case _ => tree.select(defn.Any_isInstanceOf).appliedToType(tp) - } /** tree.asInstanceOf[`tp`] */ - def asInstance(tp: Type)(using Context): Tree = { + def asInstance(tp: Type)(using Context): Tree = assert(tp.isValueType, i"bad cast: $tree.asInstanceOf[$tp]") tree.select(defn.Any_asInstanceOf).appliedToType(tp) - } /** cast tree to `tp`, assuming no exception is raised, i.e the operation is pure */ def cast(tp: Type)(using Context): Tree = cast(TypeTree(tp)) @@ -1033,7 +965,7 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { else Erasure.Boxing.adaptToType(tree, tp) /** `tree ne null` (might need a cast to be type correct) */ - def testNotNull(using Context): Tree = { + def testNotNull(using Context): Tree = // If the receiver is of type `Nothing` or `Null`, add an ascription or cast // so that the selection succeeds. // e.g. `null.ne(null)` doesn't type, but `(null: AnyRef).ne(null)` does. 
@@ -1045,7 +977,6 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { // also need to cast the null literal to AnyRef in explicit nulls val nullLit = if ctx.explicitNulls then nullLiteral.cast(defn.AnyRefType) else nullLiteral receiver.select(defn.Object_ne).appliedTo(nullLit).withSpan(tree.span) - } /** If inititializer tree is `_`, the default value of its type, * otherwise the tree itself. @@ -1064,21 +995,17 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { /** The translation of `tree = rhs`. * This is either the tree as an assignment, or a setter call. */ - def becomes(rhs: Tree)(using Context): Tree = { + def becomes(rhs: Tree)(using Context): Tree = val sym = tree.symbol - if (sym.is(Method)) { - val setter = sym.setter.orElse { + if (sym.is(Method)) + val setter = sym.setter.orElse: assert(sym.name.isSetterName && sym.info.firstParamTypes.nonEmpty, sym) sym - } - val qual = tree match { + val qual = tree match case id: Ident => desugarIdentPrefix(id) case Select(qual, _) => qual - } qual.select(setter).appliedTo(rhs) - } else Assign(tree, rhs) - } /** tree @annot * @@ -1098,16 +1025,14 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { untpd.Select(tree, OuterSelectName(EmptyTermName, levels)).withType(SkolemType(tp)) /** Replace Inlined nodes and InlineProxy references to underlying arguments */ - def underlyingArgument(using Context): Tree = { - val mapToUnderlying = new MapToUnderlying { + def underlyingArgument(using Context): Tree = + val mapToUnderlying = new MapToUnderlying: /** Should get the rhs of this binding * Returns true if the symbol is a val or def generated by eta-expansion/inline */ override protected def skipLocal(sym: Symbol): Boolean = sym.isOneOf(InlineProxy | Synthetic) - } mapToUnderlying.transform(tree) - } /** Replace Ident nodes references to the underlying tree that defined them */ def underlying(using Context): Tree = MapToUnderlying().transform(tree) @@ -1115,34 +1040,28 @@ object tpd 
extends Trees.Instance[Type] with TypedTreeInfo { // --- Higher order traversal methods ------------------------------- /** Apply `f` to each subtree of this tree */ - def foreachSubTree(f: Tree => Unit)(using Context): Unit = { - val traverser = new TreeTraverser { + def foreachSubTree(f: Tree => Unit)(using Context): Unit = + val traverser = new TreeTraverser: def traverse(tree: Tree)(using Context) = foldOver(f(tree), tree) - } traverser.traverse(tree) - } /** Is there a subtree of this tree that satisfies predicate `p`? */ - def existsSubTree(p: Tree => Boolean)(using Context): Boolean = { - val acc = new TreeAccumulator[Boolean] { + def existsSubTree(p: Tree => Boolean)(using Context): Boolean = + val acc = new TreeAccumulator[Boolean]: def apply(x: Boolean, t: Tree)(using Context) = x || p(t) || foldOver(x, t) - } acc(false, tree) - } /** All subtrees of this tree that satisfy predicate `p`. */ - def filterSubTrees(f: Tree => Boolean)(using Context): List[Tree] = { + def filterSubTrees(f: Tree => Boolean)(using Context): List[Tree] = val buf = mutable.ListBuffer[Tree]() foreachSubTree { tree => if (f(tree)) buf += tree } buf.toList - } /** Set this tree as the `defTree` of its symbol and return this tree */ - def setDefTree(using Context): ThisTree = { + def setDefTree(using Context): ThisTree = val sym = tree.symbol if (sym.exists) sym.defTree = tree tree - } def etaExpandCFT(using Context): Tree = def expand(target: Tree, tp: Type)(using Context): Tree = tp match @@ -1158,7 +1077,6 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { case _ => target expand(tree, tree.tpe.widen) - } extension (trees: List[Tree]) @@ -1253,21 +1171,19 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { /** Map Inlined nodes, NamedArgs, Blocks with no statements and local references to underlying arguments. * Also drops Inline and Block with no statements. 
*/ - private class MapToUnderlying extends TreeMap { - override def transform(tree: Tree)(using Context): Tree = tree match { + private class MapToUnderlying extends TreeMap: + override def transform(tree: Tree)(using Context): Tree = tree match case tree: Ident if isBinding(tree.symbol) && skipLocal(tree.symbol) => - tree.symbol.defTree match { + tree.symbol.defTree match case defTree: ValOrDefDef => val rhs = defTree.rhs assert(!rhs.isEmpty) transform(rhs) case _ => tree - } case Inlined(_, Nil, arg) => transform(arg) case Block(Nil, arg) => transform(arg) case NamedArg(_, arg) => transform(arg) case tree => super.transform(tree) - } /** Should get the rhs of this binding */ protected def skipLocal(sym: Symbol): Boolean = true @@ -1276,26 +1192,23 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { private def isBinding(sym: Symbol)(using Context): Boolean = sym.isTerm && !sym.is(Param) && !sym.owner.isClass && !(sym.is(Method) && sym.info.isInstanceOf[MethodOrPoly]) // if is a method it is parameterless - } extension (xs: List[tpd.Tree]) - def tpes: List[Type] = xs match { + def tpes: List[Type] = xs match case x :: xs1 => x.tpe :: xs1.tpes case nil => Nil - } /** A trait for loaders that compute trees. Currently implemented just by DottyUnpickler. */ - trait TreeProvider { + trait TreeProvider: protected def computeRootTrees(using Context): List[Tree] private var myTrees: List[Tree] | Null = _ /** Get trees defined by this provider. Cache them if -Yretain-trees is set. */ def rootTrees(using Context): List[Tree] = - if (ctx.settings.YretainTrees.value) { + if (ctx.settings.YretainTrees.value) if (myTrees == null) myTrees = computeRootTrees myTrees.uncheckedNN - } else computeRootTrees /** Get first tree defined by this provider, or EmptyTree if none exists */ @@ -1304,21 +1217,18 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { /** Is it possible that the tree to load contains a definition of or reference to `id`? 
*/ def mightContain(id: String)(using Context): Boolean = true - } // convert a numeric with a toXXX method - def primitiveConversion(tree: Tree, numericCls: Symbol)(using Context): Tree = { + def primitiveConversion(tree: Tree, numericCls: Symbol)(using Context): Tree = val mname = "to".concat(numericCls.name) val conversion = tree.tpe member(mname) if (conversion.symbol.exists) tree.select(conversion.symbol.termRef).ensureApplied else if (tree.tpe.widen isRef numericCls) tree - else { + else report.warning(em"conversion from ${tree.tpe.widen} to ${numericCls.typeRef} will always fail at runtime.") Throw(New(defn.ClassCastExceptionClass.typeRef, Nil)).withSpan(tree.span) - } - } /** A tree that corresponds to `Predef.classOf[$tp]` in source */ def clsOf(tp: Type)(using Context): Tree = @@ -1344,10 +1254,9 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { */ def letBindUnless(level: TreeInfo.PurityLevel, tree: Tree)(within: Tree => Tree)(using Context): Tree = if (exprPurity(tree) >= level) within(tree) - else { + else val vdef = SyntheticValDef(TempResultName.fresh(), tree) Block(vdef :: Nil, within(Ident(vdef.namedType))) - } /** Let bind `tree` unless `tree` is at least idempotent */ def evalOnce(tree: Tree)(within: Tree => Tree)(using Context): Tree = @@ -1381,15 +1290,14 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { /** A key to be used in a context property that tracks the number of inlined trees */ private val InlinedTrees = Property.Key[Counter]() - final class Counter { + final class Counter: var count: Int = 0 - } /** Record an enclosing inlined call. * EmptyTree calls (for parameters) cancel the next-enclosing call in the list instead of being added to it. * We assume parameters are never nested inside parameters. 
*/ - override def inlineContext(call: Tree)(using Context): Context = { + override def inlineContext(call: Tree)(using Context): Context = // We assume enclosingInlineds is already normalized, and only process the new call with the head. val oldIC = enclosingInlineds @@ -1403,7 +1311,6 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { val ctx1 = ctx.fresh.setProperty(InlinedCalls, newIC) if oldIC.isEmpty then ctx1.setProperty(InlinedTrees, new Counter) else ctx1 - } /** All enclosing calls that are currently inlined, from innermost to outermost. */ @@ -1426,14 +1333,13 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { def sourceFile(call: Tree)(using Context): SourceFile = call.symbol.source /** Desugar identifier into a select node. Return the tree itself if not possible */ - def desugarIdent(tree: Ident)(using Context): RefTree = { + def desugarIdent(tree: Ident)(using Context): RefTree = val qual = desugarIdentPrefix(tree) if (qual.isEmpty) tree else qual.select(tree.symbol) - } /** Recover identifier prefix (e.g. this) if it exists */ - def desugarIdentPrefix(tree: Ident)(using Context): Tree = tree.tpe match { + def desugarIdentPrefix(tree: Ident)(using Context): Tree = tree.tpe match case TermRef(prefix: TermRef, _) => prefix.info match case mt: MethodType if mt.paramInfos.isEmpty && mt.resultType.typeSymbol.is(Module) => @@ -1444,7 +1350,6 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { This(prefix.cls) case _ => EmptyTree - } /** * The symbols that are imported with `expr.name` @@ -1453,7 +1358,7 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { * @param name The name that is being imported. * @return All the symbols that would be imported with `expr.name`. 
*/ - def importedSymbols(expr: Tree, name: Name)(using Context): List[Symbol] = { + def importedSymbols(expr: Tree, name: Name)(using Context): List[Symbol] = def lookup(name: Name): Symbol = expr.tpe.member(name).symbol val symbols = List(lookup(name.toTermName), @@ -1462,7 +1367,6 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { lookup(name.sourceModuleName)) symbols.map(_.sourceSymbol).filter(_.exists).distinct - } /** * All the symbols that are imported by the first selector of `imp` that matches @@ -1483,13 +1387,13 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { * The list of select trees that resolve to the same symbols as the ones that are imported * by `imp`. */ - def importSelections(imp: Import)(using Context): List[Select] = { - def imported(sym: Symbol, id: untpd.Ident, rename: Option[untpd.Ident]): List[Select] = { + def importSelections(imp: Import)(using Context): List[Select] = + def imported(sym: Symbol, id: untpd.Ident, rename: Option[untpd.Ident]): List[Select] = // Give a zero-extent position to the qualifier to prevent it from being included several // times in results in the language server. 
val noPosExpr = focusPositions(imp.expr) val selectTree = Select(noPosExpr, sym.name).withSpan(id.span) - rename match { + rename match case None => selectTree :: Nil case Some(rename) => @@ -1499,8 +1403,6 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { val actual = Select(noPosExpr, sym.name) val renameTree = Select(noPosExpr, name).withSpan(rename.span).withType(actual.tpe) selectTree :: renameTree :: Nil - } - } imp.selectors.flatMap { sel => if sel.isWildcard then Nil @@ -1512,10 +1414,9 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { imported(sym, sel.imported, renamedOpt) } } - } /** Creates the tuple type tree representation of the type trees in `ts` */ - def tupleTypeTree(elems: List[Tree])(using Context): Tree = { + def tupleTypeTree(elems: List[Tree])(using Context): Tree = val arity = elems.length if arity <= Definitions.MaxTupleArity then val tupleTp = defn.TupleType(arity) @@ -1523,7 +1424,6 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { AppliedTypeTree(TypeTree(tupleTp), elems) else nestedPairsTypeTree(elems) else nestedPairsTypeTree(elems) - } /** Creates the nested pairs type tree representation of the type trees in `ts` */ def nestedPairsTypeTree(ts: List[Tree])(using Context): Tree = @@ -1534,13 +1434,11 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { ts.foldRight[Tree](TypeTree(defn.QuoteMatching_KNil.typeRef))((x, acc) => AppliedTypeTree(TypeTree(defn.QuoteMatching_KCons.typeRef), x :: acc :: Nil)) /** Replaces all positions in `tree` with zero-extent positions */ - private def focusPositions(tree: Tree)(using Context): Tree = { - val transformer = new tpd.TreeMap { + private def focusPositions(tree: Tree)(using Context): Tree = + val transformer = new tpd.TreeMap: override def transform(tree: Tree)(using Context): Tree = super.transform(tree).withSpan(tree.span.focus) - } transformer.transform(tree) - } /** Convert a list of trees to a vararg-compatible tree. 
* Used to make arguments for methods that accept varargs. @@ -1565,4 +1463,3 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { protected def FunProto(args: List[Tree], resType: Type)(using Context) = ProtoTypes.FunProtoTyped(args, resType)(ctx.typer, ApplyKind.Regular) -} diff --git a/compiler/src/dotty/tools/dotc/ast/untpd.scala b/compiler/src/dotty/tools/dotc/ast/untpd.scala index e3488034fef8..4dd52788b04b 100644 --- a/compiler/src/dotty/tools/dotc/ast/untpd.scala +++ b/compiler/src/dotty/tools/dotc/ast/untpd.scala @@ -12,39 +12,35 @@ import annotation.constructorOnly import annotation.internal.sharable import Decorators._ -object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { +object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo: // ----- Tree cases that exist in untyped form only ------------------ - abstract class OpTree(implicit @constructorOnly src: SourceFile) extends Tree { + abstract class OpTree(implicit @constructorOnly src: SourceFile) extends Tree: def op: Ident override def isTerm: Boolean = op.isTerm override def isType: Boolean = op.isType - } /** A typed subtree of an untyped tree needs to be wrapped in a TypedSplice * @param owner The current owner at the time the tree was defined * @param isExtensionReceiver The splice was created from the receiver `e` in an extension * method call `e.f(...)` */ - abstract case class TypedSplice(splice: tpd.Tree)(val owner: Symbol, val isExtensionReceiver: Boolean)(implicit @constructorOnly src: SourceFile) extends ProxyTree { + abstract case class TypedSplice(splice: tpd.Tree)(val owner: Symbol, val isExtensionReceiver: Boolean)(implicit @constructorOnly src: SourceFile) extends ProxyTree: def forwardTo: tpd.Tree = splice override def toString = def ext = if isExtensionReceiver then ", isExtensionReceiver = true" else "" s"TypedSplice($splice$ext)" - } - object TypedSplice { + object TypedSplice: def apply(tree: tpd.Tree, isExtensionReceiver: Boolean = false)(using 
Context): TypedSplice = new TypedSplice(tree)(ctx.owner, isExtensionReceiver) {} - } /** mods object name impl */ case class ModuleDef(name: TermName, impl: Template)(implicit @constructorOnly src: SourceFile) - extends MemberDef { + extends MemberDef: type ThisTree[+T <: Untyped] <: Trees.NameTree[T] with Trees.MemberDef[T] with ModuleDef def withName(name: Name)(using Context): ModuleDef = cpy.ModuleDef(this)(name.toTermName, impl) - } /** An untyped template with a derives clause. Derived parents are added to the end * of the `parents` list. `derivedCount` keeps track of how many there are. @@ -53,11 +49,10 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { * - keep invariant that elements of untyped trees align with source positions */ class DerivingTemplate(constr: DefDef, parentsOrDerived: List[Tree], self: ValDef, preBody: LazyTreeList, derivedCount: Int)(implicit @constructorOnly src: SourceFile) - extends Template(constr, parentsOrDerived, self, preBody) { + extends Template(constr, parentsOrDerived, self, preBody): private val myParents = parentsOrDerived.dropRight(derivedCount) override def parents(using Context) = myParents override val derived = parentsOrDerived.takeRight(derivedCount) - } case class ParsedTry(expr: Tree, handler: Tree, finalizer: Tree)(implicit @constructorOnly src: SourceFile) extends TermTree @@ -71,24 +66,21 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { extends TermTree /** A function type or closure */ - case class Function(args: List[Tree], body: Tree)(implicit @constructorOnly src: SourceFile) extends Tree { + case class Function(args: List[Tree], body: Tree)(implicit @constructorOnly src: SourceFile) extends Tree: override def isTerm: Boolean = body.isTerm override def isType: Boolean = body.isType - } /** A function type or closure with `implicit` or `given` modifiers and information on which parameters are `erased` */ class FunctionWithMods(args: List[Tree], body: Tree, val mods: 
Modifiers, val erasedParams: List[Boolean])(implicit @constructorOnly src: SourceFile) - extends Function(args, body) { + extends Function(args, body): assert(args.length == erasedParams.length) def hasErasedParams = erasedParams.contains(true) - } /** A polymorphic function type */ - case class PolyFunction(targs: List[Tree], body: Tree)(implicit @constructorOnly src: SourceFile) extends Tree { + case class PolyFunction(targs: List[Tree], body: Tree)(implicit @constructorOnly src: SourceFile) extends Tree: override def isTerm = body.isTerm override def isType = body.isType - } /** A function created from a wildcard expression * @param placeholderParams a list of definitions of synthetic parameters. @@ -103,13 +95,11 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { case class InfixOp(left: Tree, op: Ident, right: Tree)(implicit @constructorOnly src: SourceFile) extends OpTree case class PostfixOp(od: Tree, op: Ident)(implicit @constructorOnly src: SourceFile) extends OpTree case class PrefixOp(op: Ident, od: Tree)(implicit @constructorOnly src: SourceFile) extends OpTree - case class Parens(t: Tree)(implicit @constructorOnly src: SourceFile) extends ProxyTree { + case class Parens(t: Tree)(implicit @constructorOnly src: SourceFile) extends ProxyTree: def forwardTo: Tree = t - } - case class Tuple(trees: List[Tree])(implicit @constructorOnly src: SourceFile) extends Tree { + case class Tuple(trees: List[Tree])(implicit @constructorOnly src: SourceFile) extends Tree: override def isTerm: Boolean = trees.isEmpty || trees.head.isTerm override def isType: Boolean = !isTerm - } case class Throw(expr: Tree)(implicit @constructorOnly src: SourceFile) extends TermTree case class ForYield(enums: List[Tree], expr: Tree)(implicit @constructorOnly src: SourceFile) extends TermTree case class ForDo(enums: List[Tree], body: Tree)(implicit @constructorOnly src: SourceFile) extends TermTree @@ -121,7 +111,7 @@ object untpd extends Trees.Instance[Untyped] with 
UntypedTreeInfo { case class Into(tpt: Tree)(implicit @constructorOnly src: SourceFile) extends Tree case class MacroTree(expr: Tree)(implicit @constructorOnly src: SourceFile) extends Tree - case class ImportSelector(imported: Ident, renamed: Tree = EmptyTree, bound: Tree = EmptyTree)(implicit @constructorOnly src: SourceFile) extends Tree { + case class ImportSelector(imported: Ident, renamed: Tree = EmptyTree, bound: Tree = EmptyTree)(implicit @constructorOnly src: SourceFile) extends Tree: // TODO: Make bound a typed tree? /** It's a `given` selector */ @@ -137,15 +127,13 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { val rename: TermName = renamed match case Ident(rename: TermName) => rename case _ => name - } case class Number(digits: String, kind: NumberKind)(implicit @constructorOnly src: SourceFile) extends TermTree - enum NumberKind { + enum NumberKind: case Whole(radix: Int) case Decimal case Floating - } /** {x1, ..., xN} T (only relevant under captureChecking) */ case class CapturesAndResult(refs: List[Tree], parent: Tree)(implicit @constructorOnly src: SourceFile) extends TypTree @@ -153,9 +141,8 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { /** Short-lived usage in typer, does not need copy/transform/fold infrastructure */ case class DependentTypeTree(tp: List[Symbol] => Type)(implicit @constructorOnly src: SourceFile) extends Tree - @sharable object EmptyTypeIdent extends Ident(tpnme.EMPTY)(NoSource) with WithoutTypeOrPos[Untyped] { + @sharable object EmptyTypeIdent extends Ident(tpnme.EMPTY)(NoSource) with WithoutTypeOrPos[Untyped]: override def isEmpty: Boolean = true - } def WildcardTypeBoundsTree()(using src: SourceFile): TypeBoundsTree = TypeBoundsTree(EmptyTree, EmptyTree, EmptyTree) object WildcardTypeBoundsTree: @@ -169,13 +156,12 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { class XMLBlock(stats: List[Tree], expr: Tree)(implicit @constructorOnly src: SourceFile) 
extends Block(stats, expr) /** An enum to control checking or filtering of patterns in GenFrom trees */ - enum GenCheckMode { + enum GenCheckMode: case Ignore // neither filter nor check since filtering was done before case Check // check that pattern is irrefutable case CheckAndFilter // both check and filter (transitional period starting with 3.2) case FilterNow // filter out non-matching elements if we are not in 3.2 or later case FilterAlways // filter out non-matching elements since pattern is prefixed by `case` - } // ----- Modifiers ----------------------------------------------------- /** Mod is intended to record syntactic information about modifiers, it's @@ -186,7 +172,7 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { sealed abstract class Mod(val flags: FlagSet)(implicit @constructorOnly src: SourceFile) extends Positioned - object Mod { + object Mod: case class Private()(implicit @constructorOnly src: SourceFile) extends Mod(Flags.Private) case class Protected()(implicit @constructorOnly src: SourceFile) extends Mod(Flags.Protected) @@ -221,7 +207,6 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { /** Used under pureFunctions to mark impure function types `A => B` in `FunctionWithMods` */ case class Impure()(implicit @constructorOnly src: SourceFile) extends Mod(Flags.Impure) - } /** Modifiers and annotations for definitions * @@ -234,7 +219,7 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { flags: FlagSet = EmptyFlags, privateWithin: TypeName = tpnme.EMPTY, annotations: List[Tree] = Nil, - mods: List[Mod] = Nil) { + mods: List[Mod] = Nil): def is(flag: Flag): Boolean = flags.is(flag) def is(flag: Flag, butNot: FlagSet): Boolean = flags.is(flag, butNot = butNot) @@ -286,7 +271,7 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { */ def withMods(ms: List[Mod]): Modifiers = if (mods eq ms) this - else { + else if (ms.nonEmpty) for (m <- ms) 
assert(flags.isAllOf(m.flags) @@ -294,7 +279,6 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { || (m.isInstanceOf[Mod.Abstract] || m.isInstanceOf[Mod.Override]) && flags.is(AbsOverride), s"unaccounted modifier: $m in $this with flags ${flags.flagsString} when adding $ms") copy(mods = ms) - } def withAddedAnnotation(annot: Tree): Modifiers = if (annotations.exists(_ eq annot)) this @@ -317,7 +301,6 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { def isEnumCase: Boolean = isEnum && is(Case) def isEnumClass: Boolean = isEnum && !is(Case) - } @sharable val EmptyModifiers: Modifiers = Modifiers() @@ -326,7 +309,7 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { /** A type tree that gets its type from some other tree's symbol. Enters the * type tree in the References attachment of the `from` tree as a side effect. */ - abstract class DerivedTypeTree(implicit @constructorOnly src: SourceFile) extends TypeTree { + abstract class DerivedTypeTree(implicit @constructorOnly src: SourceFile) extends TypeTree: private var myWatched: Tree = EmptyTree @@ -334,12 +317,11 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { def watched: Tree = myWatched /** Install the derived type tree as a dependency on `original` */ - def watching(original: DefTree): this.type = { + def watching(original: DefTree): this.type = myWatched = original val existing = original.attachmentOrElse(References, Nil) original.putAttachment(References, this :: existing) this - } /** Install the derived type tree as a dependency on `sym` */ def watching(sym: Symbol): this.type = withAttachment(OriginalSymbol, sym) @@ -351,7 +333,6 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { /** The method that computes the tree with the derived type */ def derivedTree(originalSym: Symbol)(using Context): tpd.Tree - } /** Property key containing TypeTrees whose type is computed * from the symbol in this type. 
These type trees have marker trees @@ -444,8 +425,8 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { /** A new expression with constrictor and possibly type arguments. See * `New(tpt, argss)` for details. */ - def makeNew(tpt: Tree)(using Context): Tree = { - val (tycon, targs) = tpt match { + def makeNew(tpt: Tree)(using Context): Tree = + val (tycon, targs) = tpt match case AppliedTypeTree(tycon, targs) => (tycon, targs) case TypedSplice(tpt1: tpd.Tree) => @@ -454,10 +435,8 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { (tpt, argTypes.map(wrap)) case _ => (tpt, Nil) - } val nu: Tree = Select(New(tycon), nme.CONSTRUCTOR) if (targs.nonEmpty) TypeApply(nu, targs) else nu - } def Block(stat: Tree, expr: Tree)(implicit src: SourceFile): Block = Block(stat :: Nil, expr) @@ -465,10 +444,9 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { def Apply(fn: Tree, arg: Tree)(implicit src: SourceFile): Apply = Apply(fn, arg :: Nil) - def ensureApplied(tpt: Tree)(implicit src: SourceFile): Tree = tpt match { + def ensureApplied(tpt: Tree)(implicit src: SourceFile): Tree = tpt match case _: Apply => tpt case _ => Apply(tpt, Nil) - } def AppliedTypeTree(tpt: Tree, arg: Tree)(implicit src: SourceFile): AppliedTypeTree = AppliedTypeTree(tpt, arg :: Nil) @@ -514,24 +492,21 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { def makeSelfDef(name: TermName, tpt: Tree)(using Context): ValDef = ValDef(name, tpt, EmptyTree).withFlags(PrivateLocal) - def makeTupleOrParens(ts: List[Tree])(using Context): Tree = ts match { + def makeTupleOrParens(ts: List[Tree])(using Context): Tree = ts match case t :: Nil => Parens(t) case _ => Tuple(ts) - } - def makeTuple(ts: List[Tree])(using Context): Tree = ts match { + def makeTuple(ts: List[Tree])(using Context): Tree = ts match case t :: Nil => t case _ => Tuple(ts) - } def makeAndType(left: Tree, right: Tree)(using Context): AppliedTypeTree = 
AppliedTypeTree(ref(defn.andType.typeRef), left :: right :: Nil) - def makeParameter(pname: TermName, tpe: Tree, mods: Modifiers, isBackquoted: Boolean = false)(using Context): ValDef = { + def makeParameter(pname: TermName, tpe: Tree, mods: Modifiers, isBackquoted: Boolean = false)(using Context): ValDef = val vdef = ValDef(pname, tpe, EmptyTree) if (isBackquoted) vdef.pushAttachment(Backquoted, ()) vdef.withMods(mods | Param) - } def makeSyntheticParameter(n: Int = 1, tpt: Tree | Null = null, flags: FlagSet = SyntheticTermParam)(using Context): ValDef = ValDef(nme.syntheticParamName(n), if (tpt == null) TypeTree() else tpt, EmptyTree) @@ -549,10 +524,9 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { /** A reference to given definition. If definition is a repeated * parameter, the reference will be a repeated argument. */ - def refOfDef(tree: MemberDef)(using Context): Tree = tree match { + def refOfDef(tree: MemberDef)(using Context): Tree = tree match case ValDef(_, PostfixOp(_, Ident(tpnme.raw.STAR)), _) => repeated(Ident(tree.name)) case _ => Ident(tree.name) - } /** A repeated argument such as `arg: _*` */ def repeated(arg: Tree)(using Context): Typed = Typed(arg, Ident(tpnme.WILDCARD_STAR)) @@ -564,120 +538,96 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { override val cpy: UntypedTreeCopier = UntypedTreeCopier() - class UntypedTreeCopier extends TreeCopier { + class UntypedTreeCopier extends TreeCopier: def postProcess(tree: Tree, copied: Tree): copied.ThisTree[Untyped] = copied.asInstanceOf[copied.ThisTree[Untyped]] def postProcess(tree: Tree, copied: MemberDef): copied.ThisTree[Untyped] = { - tree match { + tree match case tree: MemberDef => copied.withMods(tree.rawMods) case _ => copied - } }.asInstanceOf[copied.ThisTree[Untyped]] - def ModuleDef(tree: Tree)(name: TermName, impl: Template)(using Context): ModuleDef = tree match { + def ModuleDef(tree: Tree)(name: TermName, impl: Template)(using Context): 
ModuleDef = tree match case tree: ModuleDef if (name eq tree.name) && (impl eq tree.impl) => tree case _ => finalize(tree, untpd.ModuleDef(name, impl)(tree.source)) - } - def ParsedTry(tree: Tree)(expr: Tree, handler: Tree, finalizer: Tree)(using Context): TermTree = tree match { + def ParsedTry(tree: Tree)(expr: Tree, handler: Tree, finalizer: Tree)(using Context): TermTree = tree match case tree: ParsedTry if (expr eq tree.expr) && (handler eq tree.handler) && (finalizer eq tree.finalizer) => tree case _ => finalize(tree, untpd.ParsedTry(expr, handler, finalizer)(tree.source)) - } - def SymbolLit(tree: Tree)(str: String)(using Context): TermTree = tree match { + def SymbolLit(tree: Tree)(str: String)(using Context): TermTree = tree match case tree: SymbolLit if str == tree.str => tree case _ => finalize(tree, untpd.SymbolLit(str)(tree.source)) - } - def InterpolatedString(tree: Tree)(id: TermName, segments: List[Tree])(using Context): TermTree = tree match { + def InterpolatedString(tree: Tree)(id: TermName, segments: List[Tree])(using Context): TermTree = tree match case tree: InterpolatedString if (id eq tree.id) && (segments eq tree.segments) => tree case _ => finalize(tree, untpd.InterpolatedString(id, segments)(tree.source)) - } - def Function(tree: Tree)(args: List[Tree], body: Tree)(using Context): Tree = tree match { + def Function(tree: Tree)(args: List[Tree], body: Tree)(using Context): Tree = tree match case tree: Function if (args eq tree.args) && (body eq tree.body) => tree case _ => finalize(tree, untpd.Function(args, body)(tree.source)) - } - def PolyFunction(tree: Tree)(targs: List[Tree], body: Tree)(using Context): Tree = tree match { + def PolyFunction(tree: Tree)(targs: List[Tree], body: Tree)(using Context): Tree = tree match case tree: PolyFunction if (targs eq tree.targs) && (body eq tree.body) => tree case _ => finalize(tree, untpd.PolyFunction(targs, body)(tree.source)) - } - def InfixOp(tree: Tree)(left: Tree, op: Ident, right: 
Tree)(using Context): Tree = tree match { + def InfixOp(tree: Tree)(left: Tree, op: Ident, right: Tree)(using Context): Tree = tree match case tree: InfixOp if (left eq tree.left) && (op eq tree.op) && (right eq tree.right) => tree case _ => finalize(tree, untpd.InfixOp(left, op, right)(tree.source)) - } - def PostfixOp(tree: Tree)(od: Tree, op: Ident)(using Context): Tree = tree match { + def PostfixOp(tree: Tree)(od: Tree, op: Ident)(using Context): Tree = tree match case tree: PostfixOp if (od eq tree.od) && (op eq tree.op) => tree case _ => finalize(tree, untpd.PostfixOp(od, op)(tree.source)) - } - def PrefixOp(tree: Tree)(op: Ident, od: Tree)(using Context): Tree = tree match { + def PrefixOp(tree: Tree)(op: Ident, od: Tree)(using Context): Tree = tree match case tree: PrefixOp if (op eq tree.op) && (od eq tree.od) => tree case _ => finalize(tree, untpd.PrefixOp(op, od)(tree.source)) - } - def Parens(tree: Tree)(t: Tree)(using Context): ProxyTree = tree match { + def Parens(tree: Tree)(t: Tree)(using Context): ProxyTree = tree match case tree: Parens if t eq tree.t => tree case _ => finalize(tree, untpd.Parens(t)(tree.source)) - } - def Tuple(tree: Tree)(trees: List[Tree])(using Context): Tree = tree match { + def Tuple(tree: Tree)(trees: List[Tree])(using Context): Tree = tree match case tree: Tuple if trees eq tree.trees => tree case _ => finalize(tree, untpd.Tuple(trees)(tree.source)) - } - def Throw(tree: Tree)(expr: Tree)(using Context): TermTree = tree match { + def Throw(tree: Tree)(expr: Tree)(using Context): TermTree = tree match case tree: Throw if expr eq tree.expr => tree case _ => finalize(tree, untpd.Throw(expr)(tree.source)) - } - def ForYield(tree: Tree)(enums: List[Tree], expr: Tree)(using Context): TermTree = tree match { + def ForYield(tree: Tree)(enums: List[Tree], expr: Tree)(using Context): TermTree = tree match case tree: ForYield if (enums eq tree.enums) && (expr eq tree.expr) => tree case _ => finalize(tree, untpd.ForYield(enums, 
expr)(tree.source)) - } - def ForDo(tree: Tree)(enums: List[Tree], body: Tree)(using Context): TermTree = tree match { + def ForDo(tree: Tree)(enums: List[Tree], body: Tree)(using Context): TermTree = tree match case tree: ForDo if (enums eq tree.enums) && (body eq tree.body) => tree case _ => finalize(tree, untpd.ForDo(enums, body)(tree.source)) - } - def GenFrom(tree: Tree)(pat: Tree, expr: Tree, checkMode: GenCheckMode)(using Context): Tree = tree match { + def GenFrom(tree: Tree)(pat: Tree, expr: Tree, checkMode: GenCheckMode)(using Context): Tree = tree match case tree: GenFrom if (pat eq tree.pat) && (expr eq tree.expr) && (checkMode == tree.checkMode) => tree case _ => finalize(tree, untpd.GenFrom(pat, expr, checkMode)(tree.source)) - } - def GenAlias(tree: Tree)(pat: Tree, expr: Tree)(using Context): Tree = tree match { + def GenAlias(tree: Tree)(pat: Tree, expr: Tree)(using Context): Tree = tree match case tree: GenAlias if (pat eq tree.pat) && (expr eq tree.expr) => tree case _ => finalize(tree, untpd.GenAlias(pat, expr)(tree.source)) - } - def ContextBounds(tree: Tree)(bounds: TypeBoundsTree, cxBounds: List[Tree])(using Context): TypTree = tree match { + def ContextBounds(tree: Tree)(bounds: TypeBoundsTree, cxBounds: List[Tree])(using Context): TypTree = tree match case tree: ContextBounds if (bounds eq tree.bounds) && (cxBounds eq tree.cxBounds) => tree case _ => finalize(tree, untpd.ContextBounds(bounds, cxBounds)(tree.source)) - } - def PatDef(tree: Tree)(mods: Modifiers, pats: List[Tree], tpt: Tree, rhs: Tree)(using Context): Tree = tree match { + def PatDef(tree: Tree)(mods: Modifiers, pats: List[Tree], tpt: Tree, rhs: Tree)(using Context): Tree = tree match case tree: PatDef if (mods eq tree.mods) && (pats eq tree.pats) && (tpt eq tree.tpt) && (rhs eq tree.rhs) => tree case _ => finalize(tree, untpd.PatDef(mods, pats, tpt, rhs)(tree.source)) - } def ExtMethods(tree: Tree)(paramss: List[ParamClause], methods: List[Tree])(using Context): Tree = tree 
match case tree: ExtMethods if (paramss eq tree.paramss) && (methods == tree.methods) => tree case _ => finalize(tree, untpd.ExtMethods(paramss, methods)(tree.source)) def Into(tree: Tree)(tpt: Tree)(using Context): Tree = tree match case tree: Into if tpt eq tree.tpt => tree case _ => finalize(tree, untpd.Into(tpt)(tree.source)) - def ImportSelector(tree: Tree)(imported: Ident, renamed: Tree, bound: Tree)(using Context): Tree = tree match { + def ImportSelector(tree: Tree)(imported: Ident, renamed: Tree, bound: Tree)(using Context): Tree = tree match case tree: ImportSelector if (imported eq tree.imported) && (renamed eq tree.renamed) && (bound eq tree.bound) => tree case _ => finalize(tree, untpd.ImportSelector(imported, renamed, bound)(tree.source)) - } - def Number(tree: Tree)(digits: String, kind: NumberKind)(using Context): Tree = tree match { + def Number(tree: Tree)(digits: String, kind: NumberKind)(using Context): Tree = tree match case tree: Number if (digits == tree.digits) && (kind == tree.kind) => tree case _ => finalize(tree, untpd.Number(digits, kind)) - } def CapturesAndResult(tree: Tree)(refs: List[Tree], parent: Tree)(using Context): Tree = tree match case tree: CapturesAndResult if (refs eq tree.refs) && (parent eq tree.parent) => tree case _ => finalize(tree, untpd.CapturesAndResult(refs, parent)) - def TypedSplice(tree: Tree)(splice: tpd.Tree)(using Context): ProxyTree = tree match { + def TypedSplice(tree: Tree)(splice: tpd.Tree)(using Context): ProxyTree = tree match case tree: TypedSplice if splice `eq` tree.splice => tree case _ => finalize(tree, untpd.TypedSplice(splice)(using ctx)) - } - def MacroTree(tree: Tree)(expr: Tree)(using Context): Tree = tree match { + def MacroTree(tree: Tree)(expr: Tree)(using Context): Tree = tree match case tree: MacroTree if expr `eq` tree.expr => tree case _ => finalize(tree, untpd.MacroTree(expr)(tree.source)) - } - } - abstract class UntypedTreeMap(cpy: UntypedTreeCopier = untpd.cpy) extends TreeMap(cpy) 
{ - override def transformMoreCases(tree: Tree)(using Context): Tree = tree match { + abstract class UntypedTreeMap(cpy: UntypedTreeCopier = untpd.cpy) extends TreeMap(cpy): + override def transformMoreCases(tree: Tree)(using Context): Tree = tree match case ModuleDef(name, impl) => cpy.ModuleDef(tree)(name, transformSub(impl)) case tree: DerivingTemplate => @@ -731,11 +681,9 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { cpy.CapturesAndResult(tree)(transform(refs), transform(parent)) case _ => super.transformMoreCases(tree) - } - } abstract class UntypedTreeAccumulator[X] extends TreeAccumulator[X] { self => - override def foldMoreCases(x: X, tree: Tree)(using Context): X = tree match { + override def foldMoreCases(x: X, tree: Tree)(using Context): X = tree match case ModuleDef(name, impl) => this(x, impl) case tree: DerivingTemplate => @@ -790,28 +738,22 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { this(this(x, refs), parent) case _ => super.foldMoreCases(x, tree) - } } - abstract class UntypedTreeTraverser extends UntypedTreeAccumulator[Unit] { + abstract class UntypedTreeTraverser extends UntypedTreeAccumulator[Unit]: def traverse(tree: Tree)(using Context): Unit def apply(x: Unit, tree: Tree)(using Context): Unit = traverse(tree) protected def traverseChildren(tree: Tree)(using Context): Unit = foldOver((), tree) - } /** Fold `f` over all tree nodes, in depth-first, prefix order */ - class UntypedDeepFolder[X](f: (X, Tree) => X) extends UntypedTreeAccumulator[X] { + class UntypedDeepFolder[X](f: (X, Tree) => X) extends UntypedTreeAccumulator[X]: def apply(x: X, tree: Tree)(using Context): X = foldOver(f(x, tree), tree) - } /** Is there a subtree of this tree that satisfies predicate `p`? 
*/ - extension (tree: Tree) def existsSubTree(p: Tree => Boolean)(using Context): Boolean = { - val acc = new UntypedTreeAccumulator[Boolean] { + extension (tree: Tree) def existsSubTree(p: Tree => Boolean)(using Context): Boolean = + val acc = new UntypedTreeAccumulator[Boolean]: def apply(x: Boolean, t: Tree)(using Context) = x || p(t) || foldOver(x, t) - } acc(false, tree) - } protected def FunProto(args: List[Tree], resType: Type)(using Context) = ProtoTypes.FunProto(args, resType)(ctx.typer, ApplyKind.Regular) -} diff --git a/compiler/src/dotty/tools/dotc/cc/CaptureAnnotation.scala b/compiler/src/dotty/tools/dotc/cc/CaptureAnnotation.scala index fd89159e2076..92495f956fc0 100644 --- a/compiler/src/dotty/tools/dotc/cc/CaptureAnnotation.scala +++ b/compiler/src/dotty/tools/dotc/cc/CaptureAnnotation.scala @@ -29,11 +29,10 @@ case class CaptureAnnotation(refs: CaptureSet, boxed: Boolean)(cls: Symbol) exte /** Reconstitute annotation tree from capture set */ override def tree(using Context) = - val elems = refs.elems.toList.map { + val elems = refs.elems.toList.map: case cr: TermRef => ref(cr) case cr: TermParamRef => untpd.Ident(cr.paramName).withType(cr) case cr: ThisType => This(cr.cls) - } val arg = repeated(elems, TypeTree(defn.AnyType)) New(symbol.typeRef, arg :: Nil) @@ -59,10 +58,9 @@ case class CaptureAnnotation(refs: CaptureSet, boxed: Boolean)(cls: Symbol) exte else EmptyAnnotation override def refersToParamOf(tl: TermLambda)(using Context): Boolean = - refs.elems.exists { + refs.elems.exists: case TermParamRef(tl1, _) => tl eq tl1 case _ => false - } override def toText(printer: Printer): Text = refs.toText(printer) diff --git a/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala b/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala index fdc4f66beafa..56acf24e987a 100644 --- a/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala +++ b/compiler/src/dotty/tools/dotc/cc/CaptureSet.scala @@ -72,10 +72,9 @@ sealed abstract class CaptureSet extends Showable: /** 
Does this capture set contain the root reference `cap` as element? */ final def isUniversal(using Context) = - elems.exists { + elems.exists: case ref: TermRef => ref.symbol == defn.captureRoot case _ => false - } /** Add new elements to this capture set if allowed. * @pre `newElems` is not empty and does not overlap with `this.elems`. @@ -123,10 +122,9 @@ sealed abstract class CaptureSet extends Showable: * as frozen. */ def accountsFor(x: CaptureRef)(using Context): Boolean = - reporting.trace(i"$this accountsFor $x, ${x.captureSetOfInfo}?", show = true) { + reporting.trace(i"$this accountsFor $x, ${x.captureSetOfInfo}?", show = true): elems.exists(_.subsumes(x)) || !x.isRootCapability && x.captureSetOfInfo.subCaptures(this, frozen = true).isOK - } /** A more optimistic version of accountsFor, which does not take variable supersets * of the `x` reference into account. A set might account for `x` if it accounts @@ -136,14 +134,13 @@ sealed abstract class CaptureSet extends Showable: * root capability `cap`. */ def mightAccountFor(x: CaptureRef)(using Context): Boolean = - reporting.trace(i"$this mightAccountFor $x, ${x.captureSetOfInfo}?", show = true) { + reporting.trace(i"$this mightAccountFor $x, ${x.captureSetOfInfo}?", show = true): elems.exists(elem => elem.subsumes(x) || elem.isRootCapability) || !x.isRootCapability && { val elems = x.captureSetOfInfo.elems !elems.isEmpty && elems.forall(mightAccountFor) } - } /** A more optimistic version of subCaptures used to choose one of two typing rules * for selections and applications. 
`cs1 mightSubcapture cs2` if `cs2` might account for @@ -182,7 +179,7 @@ sealed abstract class CaptureSet extends Showable: */ def =:= (that: CaptureSet)(using Context): Boolean = this.subCaptures(that, frozen = true).isOK - && that.subCaptures(this, frozen = true).isOK + && that.subCaptures(this, frozen = true).isOK /** The smallest capture set (via <:<) that is a superset of both * `this` and `that` @@ -495,10 +492,9 @@ object CaptureSet: * establishes a record of all variables printed in an error message. * Prints variables wih ids under -Ycc-debug. */ - override def toText(printer: Printer): Text = inContext(printer.printerContext) { + override def toText(printer: Printer): Text = inContext(printer.printerContext): for vars <- ctx.property(ShownVars) do vars += this super.toText(printer) ~ (Str(ids) provided !isConst && ctx.settings.YccDebug.value) - } override def toString = s"Var$id$elems" end Var @@ -553,12 +549,11 @@ object CaptureSet: // as superset; no mapping is necessary or allowed. Const(newElems) super.addNewElems(added.elems, origin) - .andAlso { + .andAlso: if added.isConst then CompareResult.OK else if added.asVar.recordDepsState() then { addAsDependentTo(added); CompareResult.OK } else CompareResult.fail(this) - } - .andAlso { + .andAlso: if (origin ne source) && (origin ne initial) && mapIsIdempotent then // `tm` is idempotent, propagate back elems from image set. 
// This is sound, since we know that for `r in newElems: tm(r) = r`, hence @@ -576,7 +571,6 @@ object CaptureSet: CompareResult.fail(this) else CompareResult.OK - } override def computeApprox(origin: CaptureSet)(using Context): CaptureSet = if source eq origin then @@ -604,10 +598,9 @@ object CaptureSet: super.addNewElems(newElems.map(bimap.forward), origin) else super.addNewElems(newElems, origin) - .andAlso { + .andAlso: source.tryInclude(newElems.map(bimap.backward), this) .showing(i"propagating new elems ${CaptureSet(newElems)} backward from $this to $source", capt) - } /** For a BiTypeMap, supertypes of the mapped type also constrain * the source via the inverse type mapping and vice versa. That is, if @@ -637,10 +630,9 @@ object CaptureSet: // Filtered elements have to be back-propagated to source. // Elements that don't satisfy `p` are not allowed. super.addNewElems(newElems, origin) - .andAlso { + .andAlso: if filtered.size == newElems.size then source.tryInclude(newElems, this) else CompareResult.fail(this) - } override def computeApprox(origin: CaptureSet)(using Context): CaptureSet = if source eq origin then @@ -884,7 +876,7 @@ object CaptureSet: def withCaptureSetsExplained[T](op: Context ?=> T)(using ctx: Context): T = if ctx.settings.YccDebug.value then val shownVars = mutable.Set[Var]() - inContext(ctx.withProperty(ShownVars, Some(shownVars))) { + inContext(ctx.withProperty(ShownVars, Some(shownVars))): try op finally val reachable = mutable.Set[Var]() @@ -895,10 +887,9 @@ object CaptureSet: val cv = todo.dequeue() if !reachable.contains(cv) then reachable += cv - cv.deps.foreach { + cv.deps.foreach: case cv: Var => incl(cv) case _ => - } cv match case cv: DerivedVar => incl(cv.source) case _ => @@ -906,6 +897,5 @@ object CaptureSet: println(i"Capture set dependencies:") for cv <- allVars do println(i" ${cv.show.padTo(20, ' ')} :: ${cv.deps.toList}%, %") - } else op end CaptureSet diff --git a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala 
b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala index 380b6ce5fb81..2d84998a8470 100644 --- a/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala +++ b/compiler/src/dotty/tools/dotc/cc/CheckCaptures.scala @@ -267,7 +267,7 @@ class CheckCaptures extends Recheck, SymTransformer: def markFree(cs: CaptureSet, pos: SrcPos)(using Context): Unit = if !cs.isAlwaysEmpty then forallOuterEnvsUpTo(ctx.owner.topLevelClass) { env => - val included = cs.filter { + val included = cs.filter: case ref: TermRef => (env.nestedInOwner || env.owner != ref.symbol.owner) && env.owner.isContainedIn(ref.symbol.owner) @@ -275,7 +275,6 @@ class CheckCaptures extends Recheck, SymTransformer: (env.nestedInOwner || env.owner != ref.cls) && env.owner.isContainedIn(ref.cls) case _ => false - } capt.println(i"Include call capture $included in ${env.owner}") checkSubset(included, env.captured, pos) } @@ -299,7 +298,7 @@ class CheckCaptures extends Recheck, SymTransformer: * outcome of a `mightSubcapture` test. It picks `{f}` if this might subcapture Cr * and Cr otherwise. */ - override def recheckSelection(tree: Select, qualType: Type, name: Name, pt: Type)(using Context) = { + override def recheckSelection(tree: Select, qualType: Type, name: Name, pt: Type)(using Context) = def disambiguate(denot: Denotation): Denotation = denot match case MultiDenotation(denot1, denot2) => // This case can arise when we try to merge multiple types that have different @@ -309,9 +308,8 @@ class CheckCaptures extends Recheck, SymTransformer: // lead to a failure in disambiguation since neither alternative is better than the // other in a frozen constraint. An example test case is disambiguate-select.scala. // We address the problem by disambiguating while ignoring all capture sets as a fallback. 
- withMode(Mode.IgnoreCaptures) { + withMode(Mode.IgnoreCaptures): disambiguate(denot1).meet(disambiguate(denot2), qualType) - } case _ => denot val selType = recheckSelection(tree, qualType, name, disambiguate) @@ -329,7 +327,7 @@ class CheckCaptures extends Recheck, SymTransformer: .showing(i"alternate type for select $tree: $selType --> $result, $qualCs / $selCs", capt) else selType - }//.showing(i"recheck sel $tree, $qualType = $result") + //.showing(i"recheck sel $tree, $qualType = $result") /** A specialized implementation of the apply rule. * @@ -356,10 +354,9 @@ class CheckCaptures extends Recheck, SymTransformer: else if meth == defn.Caps_unsafeUnbox then mapArgUsing(_.forceBoxStatus(false)) else if meth == defn.Caps_unsafeBoxFunArg then - mapArgUsing { + mapArgUsing: case defn.FunctionOf(paramtpe :: Nil, restpe, isContectual) => defn.FunctionOf(paramtpe.forceBoxStatus(true) :: Nil, restpe, isContectual) - } else super.recheckApply(tree, pt) match case appType @ CapturingType(appType1, refs) => @@ -668,11 +665,10 @@ class CheckCaptures extends Recheck, SymTransformer: (erefs /: erefs.elems) { (erefs, eref) => eref match case eref: ThisType if isPureContext(ctx.owner, eref.cls) => - erefs ++ arefs.filter { + erefs ++ arefs.filter: case aref: TermRef => eref.cls.isProperlyContainedIn(aref.symbol.owner) case aref: ThisType => eref.cls.isProperlyContainedIn(aref.cls) case _ => false - } case _ => erefs } @@ -751,7 +747,7 @@ class CheckCaptures extends Recheck, SymTransformer: val arrow = if covariant then "~~>" else "<~~" i"adapting $actual $arrow $expected" - def adapt(actual: Type, expected: Type, covariant: Boolean): Type = trace(adaptInfo(actual, expected, covariant), recheckr, show = true) { + def adapt(actual: Type, expected: Type, covariant: Boolean): Type = trace(adaptInfo(actual, expected, covariant), recheckr, show = true): if expected.isInstanceOf[WildcardType] then actual else // Decompose the actual type into the inner shape type, the capture set 
and the box status @@ -765,7 +761,7 @@ class CheckCaptures extends Recheck, SymTransformer: val insertBox = needsAdaptation && covariant != boxed // Adapt the inner shape type: get the adapted shape type, and the capture set leaked during adaptation - val (styp1, leaked) = styp match { + val (styp1, leaked) = styp match case actual @ AppliedType(tycon, args) if defn.isNonRefinedFunction(actual) => adaptFun(actual, args.init, args.last, expected, covariant, insertBox, (aargs1, ares1) => actual.derivedAppliedType(tycon, aargs1 :+ ares1)) @@ -788,7 +784,6 @@ class CheckCaptures extends Recheck, SymTransformer: ) case _ => (styp, CaptureSet()) - } // Capture set of the term after adaptation val cs1 = cs ++ leaked @@ -822,7 +817,6 @@ class CheckCaptures extends Recheck, SymTransformer: adaptedType(!boxed) else adaptedType(boxed) - } var actualw = actual.widenDealias actual match @@ -846,7 +840,7 @@ class CheckCaptures extends Recheck, SymTransformer: * But maybe we can then elide the check during the RefChecks phase under captureChecking? */ def checkOverrides = new TreeTraverser: - class OverridingPairsCheckerCC(clazz: ClassSymbol, self: Type, srcPos: SrcPos)(using Context) extends OverridingPairsChecker(clazz, self) { + class OverridingPairsCheckerCC(clazz: ClassSymbol, self: Type, srcPos: SrcPos)(using Context) extends OverridingPairsChecker(clazz, self): /** Check subtype with box adaptation. * This function is passed to RefChecks to check the compatibility of overriding pairs. 
* @param sym symbol of the field definition that is being checked @@ -868,7 +862,6 @@ class CheckCaptures extends Recheck, SymTransformer: case _ => adapted finally curEnv = saved actual1 frozen_<:< expected1 - } def traverse(t: Tree)(using Context) = t match @@ -880,14 +873,13 @@ class CheckCaptures extends Recheck, SymTransformer: override def checkUnit(unit: CompilationUnit)(using Context): Unit = Setup(preRecheckPhase, thisPhase, recheckDef)(ctx.compilationUnit.tpdTree) //println(i"SETUP:\n${Recheck.addRecheckedTypes.transform(ctx.compilationUnit.tpdTree)}") - withCaptureSetsExplained { + withCaptureSetsExplained: super.checkUnit(unit) checkOverrides.traverse(unit.tpdTree) checkSelfTypes(unit.tpdTree) postCheck(unit.tpdTree) if ctx.settings.YccDebug.value then show(unit.tpdTree) // this does not print tree, but makes its variables visible for dependency printing - } /** Check that self types of subclasses conform to self types of super classes. * (See comment below how this is achieved). The check assumes that classes @@ -898,10 +890,9 @@ class CheckCaptures extends Recheck, SymTransformer: */ def checkSelfTypes(unit: tpd.Tree)(using Context): Unit = val parentTrees = mutable.HashMap[Symbol, List[Tree]]() - unit.foreachSubTree { + unit.foreachSubTree: case cdef @ TypeDef(_, impl: Template) => parentTrees(cdef.symbol) = impl.parents case _ => - } // Perform self type checking. The problem here is that `checkParents` compares a // self type of a subclass with the result of an asSeenFrom of the self type of the // superclass. That's no good. We need to constrain the original superclass self type @@ -913,9 +904,8 @@ class CheckCaptures extends Recheck, SymTransformer: // That means all capture sets of parent self types are constants, so mapping // them with asSeenFrom is OK. 
while parentTrees.nonEmpty do - val roots = parentTrees.keysIterator.filter { + val roots = parentTrees.keysIterator.filter: cls => !parentTrees(cls).exists(ptree => parentTrees.contains(ptree.tpe.classSymbol)) - } assert(roots.nonEmpty) for case root: ClassSymbol <- roots do checkSelfAgainstParents(root, root.baseClasses) @@ -1048,10 +1038,9 @@ class CheckCaptures extends Recheck, SymTransformer: || // non-local symbols cannot have inferred types since external capture types are not inferred isLocal // local symbols still need explicit types if && !sym.owner.is(Trait) // they are defined in a trait, since we do OverridingPairs checking before capture inference - def isNotPureThis(ref: CaptureRef) = ref match { + def isNotPureThis(ref: CaptureRef) = ref match case ref: ThisType => !ref.cls.isPureClass case _ => true - } if !canUseInferred then val inferred = t.tpt.knownType def checkPure(tp: Type) = tp match diff --git a/compiler/src/dotty/tools/dotc/cc/Setup.scala b/compiler/src/dotty/tools/dotc/cc/Setup.scala index bbe54f14b86c..a486ed734d3b 100644 --- a/compiler/src/dotty/tools/dotc/cc/Setup.scala +++ b/compiler/src/dotty/tools/dotc/cc/Setup.scala @@ -460,7 +460,7 @@ extends tpd.TreeTraverser: else val subst = SubstParams(psyms :: prevPsymss, mt1 :: prevLambdas) psyms.map(psym => subst(psym.info).asInstanceOf[mt.PInfo]), - mt1 => + mt1 => integrateRT(mt.resType, psymss.tail, psyms :: prevPsymss, mt1 :: prevLambdas) ) case info: ExprType => diff --git a/compiler/src/dotty/tools/dotc/cc/Synthetics.scala b/compiler/src/dotty/tools/dotc/cc/Synthetics.scala index 5fe68dd6a7ac..95698192f935 100644 --- a/compiler/src/dotty/tools/dotc/cc/Synthetics.scala +++ b/compiler/src/dotty/tools/dotc/cc/Synthetics.scala @@ -105,10 +105,9 @@ object Synthetics: case _ if idx < owner.asClass.paramGetters.length => val param = owner.asClass.paramGetters(idx) val pinfo = param.info - atPhase(ctx.phase.next) { + atPhase(ctx.phase.next): if pinfo.captureSet.isAlwaysEmpty then info else 
CapturingType(pinfo.stripCapturing, CaptureSet(param.termRef)) - } case _ => info diff --git a/compiler/src/dotty/tools/dotc/classpath/AggregateClassPath.scala b/compiler/src/dotty/tools/dotc/classpath/AggregateClassPath.scala index 51b261583feb..62eaa04c1914 100644 --- a/compiler/src/dotty/tools/dotc/classpath/AggregateClassPath.scala +++ b/compiler/src/dotty/tools/dotc/classpath/AggregateClassPath.scala @@ -21,36 +21,30 @@ import dotty.tools.io.{ AbstractFile, ClassPath, ClassRepresentation, EfficientC * * @param aggregates classpath instances containing entries which this class processes */ -case class AggregateClassPath(aggregates: Seq[ClassPath]) extends ClassPath { - override def findClassFile(className: String): Option[AbstractFile] = { +case class AggregateClassPath(aggregates: Seq[ClassPath]) extends ClassPath: + override def findClassFile(className: String): Option[AbstractFile] = val (pkg, _) = PackageNameUtils.separatePkgAndClassNames(className) - aggregatesForPackage(PackageName(pkg)).iterator.map(_.findClassFile(className)).collectFirst { + aggregatesForPackage(PackageName(pkg)).iterator.map(_.findClassFile(className)).collectFirst: case Some(x) => x - } - } private val packageIndex: collection.mutable.Map[String, Seq[ClassPath]] = collection.mutable.Map() - private def aggregatesForPackage(pkg: PackageName): Seq[ClassPath] = packageIndex.synchronized { + private def aggregatesForPackage(pkg: PackageName): Seq[ClassPath] = packageIndex.synchronized: packageIndex.getOrElseUpdate(pkg.dottedString, aggregates.filter(_.hasPackage(pkg))) - } - override def findClass(className: String): Option[ClassRepresentation] = { + override def findClass(className: String): Option[ClassRepresentation] = val (pkg, _) = PackageNameUtils.separatePkgAndClassNames(className) def findEntry(isSource: Boolean): Option[ClassRepresentation] = - aggregatesForPackage(PackageName(pkg)).iterator.map(_.findClass(className)).collectFirst { + 
aggregatesForPackage(PackageName(pkg)).iterator.map(_.findClass(className)).collectFirst: case Some(s: SourceFileEntry) if isSource => s case Some(s: ClassFileEntry) if !isSource => s - } val classEntry = findEntry(isSource = false) val sourceEntry = findEntry(isSource = true) - (classEntry, sourceEntry) match { + (classEntry, sourceEntry) match case (Some(c: ClassFileEntry), Some(s: SourceFileEntry)) => Some(ClassAndSourceFilesEntry(c.file, s.file)) case (c @ Some(_), _) => c case (_, s) => s - } - } override def asURLs: Seq[URL] = aggregates.flatMap(_.asURLs) @@ -58,10 +52,9 @@ case class AggregateClassPath(aggregates: Seq[ClassPath]) extends ClassPath { override def asSourcePathString: String = ClassPath.join(aggregates map (_.asSourcePathString): _*) - override private[dotty] def packages(inPackage: PackageName): Seq[PackageEntry] = { + override private[dotty] def packages(inPackage: PackageName): Seq[PackageEntry] = val aggregatedPackages = aggregates.flatMap(_.packages(inPackage)).distinct aggregatedPackages - } override private[dotty] def classes(inPackage: PackageName): Seq[ClassFileEntry] = getDistinctEntries(_.classes(inPackage)) @@ -70,44 +63,40 @@ case class AggregateClassPath(aggregates: Seq[ClassPath]) extends ClassPath { getDistinctEntries(_.sources(inPackage)) override private[dotty] def hasPackage(pkg: PackageName): Boolean = aggregates.exists(_.hasPackage(pkg)) - override private[dotty] def list(inPackage: PackageName): ClassPathEntries = { + override private[dotty] def list(inPackage: PackageName): ClassPathEntries = val packages: java.util.HashSet[PackageEntry] = new java.util.HashSet[PackageEntry]() val classesAndSourcesBuffer = collection.mutable.ArrayBuffer[ClassRepresentation]() val onPackage: PackageEntry => Unit = packages.add(_) val onClassesAndSources: ClassRepresentation => Unit = classesAndSourcesBuffer += _ aggregates.foreach { cp => - try { - cp match { + try + cp match case ecp: EfficientClassPath => ecp.list(inPackage, onPackage, 
onClassesAndSources) case _ => val entries = cp.list(inPackage) entries._1.foreach(entry => packages.add(entry)) classesAndSourcesBuffer ++= entries._2 - } - } catch { + catch case ex: java.io.IOException => val e = FatalError(ex.getMessage) e.initCause(ex) throw e - } } - val distinctPackages: Seq[PackageEntry] = { + val distinctPackages: Seq[PackageEntry] = val arr = packages.toArray(new Array[PackageEntry](packages.size())) ArraySeq.unsafeWrapArray(arr) - } val distinctClassesAndSources = mergeClassesAndSources(classesAndSourcesBuffer) ClassPathEntries(distinctPackages, distinctClassesAndSources) - } /** * Returns only one entry for each name. If there's both a source and a class entry, it * creates an entry containing both of them. If there would be more than one class or source * entries for the same class it always would use the first entry of each type found on a classpath. */ - private def mergeClassesAndSources(entries: scala.collection.Seq[ClassRepresentation]): Seq[ClassRepresentation] = { + private def mergeClassesAndSources(entries: scala.collection.Seq[ClassRepresentation]): Seq[ClassRepresentation] = // based on the implementation from MergedClassPath var count = 0 val indices = util.HashMap[String, Int]() @@ -116,7 +105,7 @@ case class AggregateClassPath(aggregates: Seq[ClassPath]) extends ClassPath { entry <- entries } { val name = entry.name - if (indices.contains(name)) { + if (indices.contains(name)) val index = indices(name) val existing = mergedEntries(index) @@ -124,39 +113,29 @@ case class AggregateClassPath(aggregates: Seq[ClassPath]) extends ClassPath { mergedEntries(index) = ClassAndSourceFilesEntry(entry.binary.get, existing.source.get) if (existing.source.isEmpty && entry.source.isDefined) mergedEntries(index) = ClassAndSourceFilesEntry(existing.binary.get, entry.source.get) - } - else { + else indices(name) = count mergedEntries += entry count += 1 - } } if (mergedEntries.isEmpty) Nil else mergedEntries.toIndexedSeq - } - private def 
getDistinctEntries[EntryType <: ClassRepresentation](getEntries: ClassPath => Seq[EntryType]): Seq[EntryType] = { + private def getDistinctEntries[EntryType <: ClassRepresentation](getEntries: ClassPath => Seq[EntryType]): Seq[EntryType] = val seenNames = util.HashSet[String]() val entriesBuffer = new ArrayBuffer[EntryType](1024) for { cp <- aggregates entry <- getEntries(cp) if !seenNames.contains(entry.name) } - { entriesBuffer += entry seenNames += entry.name - } entriesBuffer.toIndexedSeq - } -} -object AggregateClassPath { - def createAggregate(parts: ClassPath*): ClassPath = { +object AggregateClassPath: + def createAggregate(parts: ClassPath*): ClassPath = val elems = new ArrayBuffer[ClassPath]() - parts foreach { + parts foreach: case AggregateClassPath(ps) => elems ++= ps case p => elems += p - } if (elems.size == 1) elems.head else AggregateClassPath(elems.toIndexedSeq) - } -} diff --git a/compiler/src/dotty/tools/dotc/classpath/ClassPath.scala b/compiler/src/dotty/tools/dotc/classpath/ClassPath.scala index 176b6acf9c6c..2f89d0919d49 100644 --- a/compiler/src/dotty/tools/dotc/classpath/ClassPath.scala +++ b/compiler/src/dotty/tools/dotc/classpath/ClassPath.scala @@ -6,23 +6,19 @@ package dotty.tools.dotc.classpath import dotty.tools.io.AbstractFile import dotty.tools.io.ClassRepresentation -case class ClassPathEntries(packages: scala.collection.Seq[PackageEntry], classesAndSources: scala.collection.Seq[ClassRepresentation]) { +case class ClassPathEntries(packages: scala.collection.Seq[PackageEntry], classesAndSources: scala.collection.Seq[ClassRepresentation]): def toTuple: (scala.collection.Seq[PackageEntry], scala.collection.Seq[ClassRepresentation]) = (packages, classesAndSources) -} -object ClassPathEntries { +object ClassPathEntries: val empty = ClassPathEntries(Seq.empty, Seq.empty) -} -trait ClassFileEntry extends ClassRepresentation { +trait ClassFileEntry extends ClassRepresentation: def file: AbstractFile -} -trait SourceFileEntry extends 
ClassRepresentation { +trait SourceFileEntry extends ClassRepresentation: def file: AbstractFile -} -case class PackageName(dottedString: String) { +case class PackageName(dottedString: String): val dirPathTrailingSlashJar: String = FileUtils.dirPathInJar(dottedString) + "/" val dirPathTrailingSlash: String = @@ -33,53 +29,44 @@ case class PackageName(dottedString: String) { def isRoot: Boolean = dottedString.isEmpty - def entryName(entry: String): String = { - if (isRoot) entry else { + def entryName(entry: String): String = + if (isRoot) entry else val builder = new java.lang.StringBuilder(dottedString.length + 1 + entry.length) builder.append(dottedString) builder.append('.') builder.append(entry) builder.toString - } - } -} -trait PackageEntry { +trait PackageEntry: def name: String -} -private[dotty] case class ClassFileEntryImpl(file: AbstractFile) extends ClassFileEntry { +private[dotty] case class ClassFileEntryImpl(file: AbstractFile) extends ClassFileEntry: final def fileName: String = file.name def name: String = FileUtils.stripClassExtension(file.name) // class name def binary: Option[AbstractFile] = Some(file) def source: Option[AbstractFile] = None -} -private[dotty] case class SourceFileEntryImpl(file: AbstractFile) extends SourceFileEntry { +private[dotty] case class SourceFileEntryImpl(file: AbstractFile) extends SourceFileEntry: final def fileName: String = file.name def name: String = FileUtils.stripSourceExtension(file.name) def binary: Option[AbstractFile] = None def source: Option[AbstractFile] = Some(file) -} -private[dotty] case class ClassAndSourceFilesEntry(classFile: AbstractFile, srcFile: AbstractFile) extends ClassRepresentation { +private[dotty] case class ClassAndSourceFilesEntry(classFile: AbstractFile, srcFile: AbstractFile) extends ClassRepresentation: final def fileName: String = classFile.name def name: String = FileUtils.stripClassExtension(classFile.name) def binary: Option[AbstractFile] = Some(classFile) def source: 
Option[AbstractFile] = Some(srcFile) -} private[dotty] case class PackageEntryImpl(name: String) extends PackageEntry -private[dotty] trait NoSourcePaths { +private[dotty] trait NoSourcePaths: def asSourcePathString: String = "" private[dotty] def sources(inPackage: PackageName): Seq[SourceFileEntry] = Seq.empty -} -private[dotty] trait NoClassPaths { +private[dotty] trait NoClassPaths: def findClassFile(className: String): Option[AbstractFile] = None private[dotty] def classes(inPackage: PackageName): Seq[ClassFileEntry] = Seq.empty -} diff --git a/compiler/src/dotty/tools/dotc/classpath/ClassPathFactory.scala b/compiler/src/dotty/tools/dotc/classpath/ClassPathFactory.scala index ac8b69381938..5d4e8cb85ab5 100644 --- a/compiler/src/dotty/tools/dotc/classpath/ClassPathFactory.scala +++ b/compiler/src/dotty/tools/dotc/classpath/ClassPathFactory.scala @@ -12,7 +12,7 @@ import dotty.tools.dotc.core.Contexts._ * Provides factory methods for classpath. When creating classpath instances for a given path, * it uses proper type of classpath depending on a types of particular files containing sources or classes. */ -class ClassPathFactory { +class ClassPathFactory: /** * Create a new classpath based on the abstract file. 
*/ @@ -54,10 +54,9 @@ class ClassPathFactory { protected def classesInPathImpl(path: String, expand: Boolean)(using Context): List[ClassPath] = for { file <- expandPath(path, expand) - dir <- { + dir <- def asImage = if (file.endsWith(".jimage")) Some(AbstractFile.getFile(file)) else None Option(AbstractFile.getDirectory(file)).orElse(asImage) - } } yield newClassPath(dir) @@ -68,10 +67,9 @@ class ClassPathFactory { new DirectorySourcePath(file.file) else sys.error(s"Unsupported sourcepath element: $file") -} -object ClassPathFactory { - def newClassPath(file: AbstractFile)(using Context): ClassPath = file match { +object ClassPathFactory: + def newClassPath(file: AbstractFile)(using Context): ClassPath = file match case vd: VirtualDirectory => VirtualDirectoryClassPath(vd) case _ => if (file.isJarOrZip) @@ -80,5 +78,3 @@ object ClassPathFactory { new DirectoryClassPath(file.file) else sys.error(s"Unsupported classpath element: $file") - } -} diff --git a/compiler/src/dotty/tools/dotc/classpath/DirectoryClassPath.scala b/compiler/src/dotty/tools/dotc/classpath/DirectoryClassPath.scala index 1411493bcbfd..24edde2759f4 100644 --- a/compiler/src/dotty/tools/dotc/classpath/DirectoryClassPath.scala +++ b/compiler/src/dotty/tools/dotc/classpath/DirectoryClassPath.scala @@ -25,7 +25,7 @@ import scala.util.control.NonFatal * when we have a name of a package. * It abstracts over the file representation to work with both JFile and AbstractFile. 
*/ -trait DirectoryLookup[FileEntryType <: ClassRepresentation] extends EfficientClassPath { +trait DirectoryLookup[FileEntryType <: ClassRepresentation] extends EfficientClassPath: type F val dir: F @@ -48,55 +48,45 @@ trait DirectoryLookup[FileEntryType <: ClassRepresentation] extends EfficientCla override private[dotty] def hasPackage(pkg: PackageName): Boolean = getDirectory(pkg).isDefined - private[dotty] def packages(inPackage: PackageName): Seq[PackageEntry] = { + private[dotty] def packages(inPackage: PackageName): Seq[PackageEntry] = val dirForPackage = getDirectory(inPackage) - val nestedDirs: Array[F] = dirForPackage match { + val nestedDirs: Array[F] = dirForPackage match case None => emptyFiles case Some(directory) => listChildren(directory, Some(isPackage)) - } ArraySeq.unsafeWrapArray(nestedDirs).map(f => PackageEntryImpl(inPackage.entryName(getName(f)))) - } - protected def files(inPackage: PackageName): Seq[FileEntryType] = { + protected def files(inPackage: PackageName): Seq[FileEntryType] = val dirForPackage = getDirectory(inPackage) - val files: Array[F] = dirForPackage match { + val files: Array[F] = dirForPackage match case None => emptyFiles case Some(directory) => listChildren(directory, Some(isMatchingFile)) - } files.iterator.map(f => createFileEntry(toAbstractFile(f))).toSeq - } - override def list(inPackage: PackageName, onPackageEntry: PackageEntry => Unit, onClassesAndSources: ClassRepresentation => Unit): Unit = { + override def list(inPackage: PackageName, onPackageEntry: PackageEntry => Unit, onClassesAndSources: ClassRepresentation => Unit): Unit = val dirForPackage = getDirectory(inPackage) - dirForPackage match { + dirForPackage match case None => case Some(directory) => - for (file <- listChildren(directory)) { + for (file <- listChildren(directory)) if (isPackage(file)) onPackageEntry(PackageEntryImpl(inPackage.entryName(getName(file)))) else if (isMatchingFile(file)) onClassesAndSources(createFileEntry(toAbstractFile(file))) - 
} - } - } -} -trait JFileDirectoryLookup[FileEntryType <: ClassRepresentation] extends DirectoryLookup[FileEntryType] { +trait JFileDirectoryLookup[FileEntryType <: ClassRepresentation] extends DirectoryLookup[FileEntryType]: type F = JFile protected def emptyFiles: Array[JFile] = Array.empty - protected def getSubDir(packageDirName: String): Option[JFile] = { + protected def getSubDir(packageDirName: String): Option[JFile] = val packageDir = new JFile(dir, packageDirName) if (packageDir.exists && packageDir.isDirectory) Some(packageDir) else None - } - protected def listChildren(dir: JFile, filter: Option[JFile => Boolean]): Array[JFile] = { - val listing = filter match { + protected def listChildren(dir: JFile, filter: Option[JFile => Boolean]): Array[JFile] = + val listing = filter match case Some(f) => dir.listFiles(mkFileFilter(f)) case None => dir.listFiles() - } - if (listing != null) { + if (listing != null) // Sort by file name for stable order of directory .class entries in package scope. // This gives stable results ordering of base type sequences for unrelated classes // with the same base type depth. 
@@ -112,9 +102,7 @@ trait JFileDirectoryLookup[FileEntryType <: ClassRepresentation] extends Directo def compare(o1: JFile, o2: JFile) = o1.getName.compareTo(o2.getName) }) listing - } else Array() - } protected def getName(f: JFile): String = f.getName protected def toAbstractFile(f: JFile): AbstractFile = f.toPath.toPlainFile protected def isPackage(f: JFile): Boolean = f.isPackage @@ -123,38 +111,31 @@ trait JFileDirectoryLookup[FileEntryType <: ClassRepresentation] extends Directo def asURLs: Seq[URL] = Seq(dir.toURI.toURL) def asClassPathStrings: Seq[String] = Seq(dir.getPath) -} -object JrtClassPath { +object JrtClassPath: import java.nio.file._, java.net.URI - def apply(release: Option[String]): Option[ClassPath] = { + def apply(release: Option[String]): Option[ClassPath] = import scala.util.Properties._ if (!isJavaAtLeast("9")) None - else { + else // Longer term we'd like an official API for this in the JDK // Discussion: http://mail.openjdk.java.net/pipermail/compiler-dev/2018-March/thread.html#11738 val currentMajorVersion: Int = JDK9Reflectors.runtimeVersionMajor(JDK9Reflectors.runtimeVersion()).intValue() - release match { + release match case Some(v) if v.toInt < currentMajorVersion => - try { + try val ctSym = Paths.get(javaHome).resolve("lib").resolve("ct.sym") if (Files.notExists(ctSym)) None else Some(new CtSymClassPath(ctSym, v.toInt)) - } catch { + catch case NonFatal(_) => None - } case _ => - try { + try val fs = FileSystems.getFileSystem(URI.create("jrt:/")) Some(new JrtClassPath(fs)) - } catch { + catch case _: ProviderNotFoundException | _: FileSystemNotFoundException => None - } - } - } - } -} /** * Implementation `ClassPath` based on the JDK 9 encapsulated runtime modules (JEP-220) @@ -164,18 +145,17 @@ object JrtClassPath { * * The implementation assumes that no classes exist in the empty package. 
*/ -final class JrtClassPath(fs: java.nio.file.FileSystem) extends ClassPath with NoSourcePaths { +final class JrtClassPath(fs: java.nio.file.FileSystem) extends ClassPath with NoSourcePaths: import java.nio.file.Path, java.nio.file._ type F = Path private val dir: Path = fs.getPath("/packages") // e.g. "java.lang" -> Seq("/modules/java.base") - private val packageToModuleBases: Map[String, Seq[Path]] = { + private val packageToModuleBases: Map[String, Seq[Path]] = val ps = Files.newDirectoryStream(dir).iterator().asScala def lookup(pack: Path): Seq[Path] = Files.list(pack).iterator().asScala.map(l => if (Files.isSymbolicLink(l)) Files.readSymbolicLink(l) else l).toList ps.map(p => (p.toString.stripPrefix("/packages/"), lookup(p))).toMap - } /** Empty string represents root package */ override private[dotty] def hasPackage(pkg: PackageName): Boolean = packageToModuleBases.contains(pkg.dottedString) @@ -201,19 +181,17 @@ final class JrtClassPath(fs: java.nio.file.FileSystem) extends ClassPath with No def findClassFile(className: String): Option[AbstractFile] = if (!className.contains(".")) None - else { + else val (inPackage, _) = separatePkgAndClassNames(className) packageToModuleBases.getOrElse(inPackage, Nil).iterator.flatMap{ x => val file = x.resolve(FileUtils.dirPath(className) + ".class") if (Files.exists(file)) file.toPlainFile :: Nil else Nil }.take(1).toList.headOption - } -} /** * Implementation `ClassPath` based on the \$JAVA_HOME/lib/ct.sym backing http://openjdk.java.net/jeps/247 */ -final class CtSymClassPath(ctSym: java.nio.file.Path, release: Int) extends ClassPath with NoSourcePaths { +final class CtSymClassPath(ctSym: java.nio.file.Path, release: Int) extends ClassPath with NoSourcePaths: import java.nio.file.Path, java.nio.file._ private val fileSystem: FileSystem = FileSystems.newFileSystem(ctSym, null: ClassLoader) @@ -228,32 +206,27 @@ final class CtSymClassPath(ctSym: java.nio.file.Path, release: Int) extends Clas private val rootsForRelease: 
List[Path] = roots.filter(root => fileNameMatchesRelease(root.getFileName.toString)) // e.g. "java.lang" -> Seq(/876/java/lang, /87/java/lang, /8/java/lang)) - private val packageIndex: scala.collection.Map[String, scala.collection.Seq[Path]] = { + private val packageIndex: scala.collection.Map[String, scala.collection.Seq[Path]] = val index = collection.mutable.AnyRefMap[String, collection.mutable.ListBuffer[Path]]() val isJava12OrHigher = scala.util.Properties.isJavaAtLeast("12") rootsForRelease.foreach(root => Files.walk(root).iterator().asScala.filter(Files.isDirectory(_)).foreach { p => val moduleNamePathElementCount = if (isJava12OrHigher) 1 else 0 - if (p.getNameCount > root.getNameCount + moduleNamePathElementCount) { + if (p.getNameCount > root.getNameCount + moduleNamePathElementCount) val packageDotted = p.subpath(moduleNamePathElementCount + root.getNameCount, p.getNameCount).toString.replace('/', '.') index.getOrElseUpdate(packageDotted, new collection.mutable.ListBuffer) += p - } }) index - } /** Empty string represents root package */ override private[dotty] def hasPackage(pkg: PackageName) = packageIndex.contains(pkg.dottedString) - override private[dotty] def packages(inPackage: PackageName): Seq[PackageEntry] = { + override private[dotty] def packages(inPackage: PackageName): Seq[PackageEntry] = packageIndex.keysIterator.filter(pack => packageContains(inPackage.dottedString, pack)).map(PackageEntryImpl(_)).toVector - } - private[dotty] def classes(inPackage: PackageName): Seq[ClassFileEntry] = { + private[dotty] def classes(inPackage: PackageName): Seq[ClassFileEntry] = if (inPackage.isRoot) Nil - else { + else val sigFiles = packageIndex.getOrElse(inPackage.dottedString, Nil).iterator.flatMap(p => Files.list(p).iterator.asScala.filter(_.getFileName.toString.endsWith(".sig"))) sigFiles.map(f => ClassFileEntryImpl(f.toPlainFile)).toVector - } - } override private[dotty] def list(inPackage: PackageName): ClassPathEntries = if (inPackage.isRoot) 
ClassPathEntries(packages(inPackage), Nil) @@ -261,37 +234,31 @@ final class CtSymClassPath(ctSym: java.nio.file.Path, release: Int) extends Clas def asURLs: Seq[URL] = Nil def asClassPathStrings: Seq[String] = Nil - def findClassFile(className: String): Option[AbstractFile] = { + def findClassFile(className: String): Option[AbstractFile] = if (!className.contains(".")) None - else { + else val (inPackage, classSimpleName) = separatePkgAndClassNames(className) packageIndex.getOrElse(inPackage, Nil).iterator.flatMap { p => val path = p.resolve(classSimpleName + ".sig") if (Files.exists(path)) path.toPlainFile :: Nil else Nil }.take(1).toList.headOption - } - } -} -case class DirectoryClassPath(dir: JFile) extends JFileDirectoryLookup[ClassFileEntryImpl] with NoSourcePaths { +case class DirectoryClassPath(dir: JFile) extends JFileDirectoryLookup[ClassFileEntryImpl] with NoSourcePaths: override def findClass(className: String): Option[ClassRepresentation] = findClassFile(className) map ClassFileEntryImpl.apply - def findClassFile(className: String): Option[AbstractFile] = { + def findClassFile(className: String): Option[AbstractFile] = val relativePath = FileUtils.dirPath(className) val classFile = new JFile(dir, relativePath + ".class") - if (classFile.exists) { + if (classFile.exists) Some(classFile.toPath.toPlainFile) - } else None - } protected def createFileEntry(file: AbstractFile): ClassFileEntryImpl = ClassFileEntryImpl(file) protected def isMatchingFile(f: JFile): Boolean = f.isClass private[dotty] def classes(inPackage: PackageName): Seq[ClassFileEntry] = files(inPackage) -} -case class DirectorySourcePath(dir: JFile) extends JFileDirectoryLookup[SourceFileEntryImpl] with NoClassPaths { +case class DirectorySourcePath(dir: JFile) extends JFileDirectoryLookup[SourceFileEntryImpl] with NoClassPaths: def asSourcePathString: String = asClassPathString protected def createFileEntry(file: AbstractFile): SourceFileEntryImpl = SourceFileEntryImpl(file) @@ -299,14 
+266,12 @@ case class DirectorySourcePath(dir: JFile) extends JFileDirectoryLookup[SourceFi override def findClass(className: String): Option[ClassRepresentation] = findSourceFile(className) map SourceFileEntryImpl.apply - private def findSourceFile(className: String): Option[AbstractFile] = { + private def findSourceFile(className: String): Option[AbstractFile] = val relativePath = FileUtils.dirPath(className) val sourceFile = LazyList("scala", "java") .map(ext => new JFile(dir, relativePath + "." + ext)) .collectFirst { case file if file.exists() => file } sourceFile.map(_.toPath.toPlainFile) - } private[dotty] def sources(inPackage: PackageName): Seq[SourceFileEntry] = files(inPackage) -} diff --git a/compiler/src/dotty/tools/dotc/classpath/FileUtils.scala b/compiler/src/dotty/tools/dotc/classpath/FileUtils.scala index d6fa6fb78d07..979107172037 100644 --- a/compiler/src/dotty/tools/dotc/classpath/FileUtils.scala +++ b/compiler/src/dotty/tools/dotc/classpath/FileUtils.scala @@ -13,8 +13,8 @@ import dotty.tools.io.AbstractFile /** * Common methods related to Java files and abstract files used in the context of classpath */ -object FileUtils { - extension (file: AbstractFile) { +object FileUtils: + extension (file: AbstractFile) def isPackage: Boolean = file.isDirectory && mayBeValidPackage(file.name) def isClass: Boolean = !file.isDirectory && file.hasExtension("class") && !file.name.endsWith("$class.class") @@ -30,14 +30,12 @@ object FileUtils { * and returning given default value in other case */ def toURLs(default: => Seq[URL] = Seq.empty): Seq[URL] = if (file.file == null) default else Seq(file.toURL) - } - extension (file: JFile) { + extension (file: JFile) def isPackage: Boolean = file.isDirectory && mayBeValidPackage(file.getName) def isClass: Boolean = file.isFile && file.getName.endsWith(".class") && !file.getName.endsWith("$class.class") // FIXME: drop last condition when we stop being compatible with Scala 2.11 - } private val SUFFIX_CLASS = ".class" 
private val SUFFIX_SCALA = ".scala" @@ -78,7 +76,5 @@ object FileUtils { def mayBeValidPackage(dirName: String): Boolean = (dirName != "META-INF") && (dirName != "") && (dirName.charAt(0) != '.') - def mkFileFilter(f: JFile => Boolean): FileFilter = new FileFilter { + def mkFileFilter(f: JFile => Boolean): FileFilter = new FileFilter: def accept(pathname: JFile): Boolean = f(pathname) - } -} diff --git a/compiler/src/dotty/tools/dotc/classpath/PackageNameUtils.scala b/compiler/src/dotty/tools/dotc/classpath/PackageNameUtils.scala index ea7412f15d8a..44c3c7e2822e 100644 --- a/compiler/src/dotty/tools/dotc/classpath/PackageNameUtils.scala +++ b/compiler/src/dotty/tools/dotc/classpath/PackageNameUtils.scala @@ -8,19 +8,18 @@ import dotty.tools.io.ClassPath.RootPackage /** * Common methods related to package names represented as String */ -object PackageNameUtils { +object PackageNameUtils: /** * @param fullClassName full class name with package * @return (package, simple class name) */ - inline def separatePkgAndClassNames(fullClassName: String): (String, String) = { + inline def separatePkgAndClassNames(fullClassName: String): (String, String) = val lastDotIndex = fullClassName.lastIndexOf('.') if (lastDotIndex == -1) (RootPackage, fullClassName) else (fullClassName.substring(0, lastDotIndex).nn, fullClassName.substring(lastDotIndex + 1).nn) - } def packagePrefix(inPackage: String): String = if (inPackage == RootPackage) "" else inPackage + "." 
@@ -29,9 +28,7 @@ object PackageNameUtils { * - `packageContains("scala", "scala.collection")` * - `packageContains("", "scala")` */ - def packageContains(inPackage: String, packageDottedName: String) = { + def packageContains(inPackage: String, packageDottedName: String) = if (packageDottedName.contains(".")) packageDottedName.startsWith(inPackage) && packageDottedName.lastIndexOf('.') == inPackage.length else inPackage == "" - } -} diff --git a/compiler/src/dotty/tools/dotc/classpath/VirtualDirectoryClassPath.scala b/compiler/src/dotty/tools/dotc/classpath/VirtualDirectoryClassPath.scala index e750d9ccacc0..04f26d9961dd 100644 --- a/compiler/src/dotty/tools/dotc/classpath/VirtualDirectoryClassPath.scala +++ b/compiler/src/dotty/tools/dotc/classpath/VirtualDirectoryClassPath.scala @@ -7,29 +7,26 @@ import dotty.tools.io.{AbstractFile, VirtualDirectory} import FileUtils._ import java.net.{URI, URL} -case class VirtualDirectoryClassPath(dir: VirtualDirectory) extends ClassPath with DirectoryLookup[ClassFileEntryImpl] with NoSourcePaths { +case class VirtualDirectoryClassPath(dir: VirtualDirectory) extends ClassPath with DirectoryLookup[ClassFileEntryImpl] with NoSourcePaths: type F = AbstractFile // From AbstractFileClassLoader - private final def lookupPath(base: AbstractFile)(pathParts: Seq[String], directory: Boolean): AbstractFile = { + private final def lookupPath(base: AbstractFile)(pathParts: Seq[String], directory: Boolean): AbstractFile = var file: AbstractFile = base val dirParts = pathParts.init.iterator - while (dirParts.hasNext) { + while (dirParts.hasNext) val dirPart = dirParts.next file = file.lookupName(dirPart, directory = true) if (file == null) return null - } file.lookupName(pathParts.last, directory = directory) - } protected def emptyFiles: Array[AbstractFile] = Array.empty protected def getSubDir(packageDirName: String): Option[AbstractFile] = Option(lookupPath(dir)(packageDirName.split(java.io.File.separator).toIndexedSeq, directory = true)) 
- protected def listChildren(dir: AbstractFile, filter: Option[AbstractFile => Boolean] = None): Array[F] = filter match { + protected def listChildren(dir: AbstractFile, filter: Option[AbstractFile => Boolean] = None): Array[F] = filter match case Some(f) => dir.iterator.filter(f).toArray case _ => dir.toArray - } def getName(f: AbstractFile): String = f.name def toAbstractFile(f: AbstractFile): AbstractFile = f def isPackage(f: AbstractFile): Boolean = f.isPackage @@ -40,13 +37,11 @@ case class VirtualDirectoryClassPath(dir: VirtualDirectory) extends ClassPath wi override def findClass(className: String): Option[ClassRepresentation] = findClassFile(className) map ClassFileEntryImpl.apply - def findClassFile(className: String): Option[AbstractFile] = { + def findClassFile(className: String): Option[AbstractFile] = val relativePath = FileUtils.dirPath(className) + ".class" Option(lookupPath(dir)(relativePath.split(java.io.File.separator).toIndexedSeq, directory = false)) - } private[dotty] def classes(inPackage: PackageName): Seq[ClassFileEntry] = files(inPackage) protected def createFileEntry(file: AbstractFile): ClassFileEntryImpl = ClassFileEntryImpl(file) protected def isMatchingFile(f: AbstractFile): Boolean = f.isClass -} diff --git a/compiler/src/dotty/tools/dotc/classpath/ZipAndJarFileLookupFactory.scala b/compiler/src/dotty/tools/dotc/classpath/ZipAndJarFileLookupFactory.scala index 865f95551a0b..8563bce47048 100644 --- a/compiler/src/dotty/tools/dotc/classpath/ZipAndJarFileLookupFactory.scala +++ b/compiler/src/dotty/tools/dotc/classpath/ZipAndJarFileLookupFactory.scala @@ -21,7 +21,7 @@ import FileUtils._ * It allows us to e.g. reduce significantly memory used by PresentationCompilers in Scala IDE * when there are a lot of projects having a lot of common dependencies. 
*/ -sealed trait ZipAndJarFileLookupFactory { +sealed trait ZipAndJarFileLookupFactory: private val cache = new FileBasedCache[ClassPath] def create(zipFile: AbstractFile)(using Context): ClassPath = @@ -33,33 +33,29 @@ sealed trait ZipAndJarFileLookupFactory { private def createUsingCache(zipFile: AbstractFile, release: Option[String]): ClassPath = cache.getOrCreate(zipFile.file.toPath, () => createForZipFile(zipFile, release)) -} /** * Manages creation of classpath for class files placed in zip and jar files. * It should be the only way of creating them as it provides caching. */ -object ZipAndJarClassPathFactory extends ZipAndJarFileLookupFactory { +object ZipAndJarClassPathFactory extends ZipAndJarFileLookupFactory: private case class ZipArchiveClassPath(zipFile: File, override val release: Option[String]) extends ZipArchiveFileLookup[ClassFileEntryImpl] - with NoSourcePaths { + with NoSourcePaths: - override def findClassFile(className: String): Option[AbstractFile] = { + override def findClassFile(className: String): Option[AbstractFile] = val (pkg, simpleClassName) = PackageNameUtils.separatePkgAndClassNames(className) file(PackageName(pkg), simpleClassName + ".class").map(_.file) - } // This method is performance sensitive as it is used by SBT's ExtractDependencies phase. - override def findClass(className: String): Option[ClassRepresentation] = { + override def findClass(className: String): Option[ClassRepresentation] = val (pkg, simpleClassName) = PackageNameUtils.separatePkgAndClassNames(className) file(PackageName(pkg), simpleClassName + ".class") - } override private[dotty] def classes(inPackage: PackageName): Seq[ClassFileEntry] = files(inPackage) override protected def createFileEntry(file: FileZipArchive#Entry): ClassFileEntryImpl = ClassFileEntryImpl(file) override protected def isRequiredFileType(file: AbstractFile): Boolean = file.isClass - } /** * This type of classpath is closely related to the support for JSR-223. 
@@ -68,11 +64,10 @@ object ZipAndJarClassPathFactory extends ZipAndJarFileLookupFactory { * with a particularly prepared scala-library.jar. It should have all classes listed in the manifest like e.g. this entry: * Name: scala/Function2$mcFJD$sp.class */ - private case class ManifestResourcesClassPath(file: ManifestResources) extends ClassPath with NoSourcePaths { - override def findClassFile(className: String): Option[AbstractFile] = { + private case class ManifestResourcesClassPath(file: ManifestResources) extends ClassPath with NoSourcePaths: + override def findClassFile(className: String): Option[AbstractFile] = val (pkg, simpleClassName) = PackageNameUtils.separatePkgAndClassNames(className) classes(PackageName(pkg)).find(_.name == simpleClassName).map(_.file) - } override def asClassPathStrings: Seq[String] = Seq(file.path) @@ -93,7 +88,7 @@ object ZipAndJarClassPathFactory extends ZipAndJarFileLookupFactory { * when we need subpackages of a given package or its classes, we traverse once and cache only packages. * Classes for given package can be then easily loaded when they are needed. 
*/ - private lazy val cachedPackages: util.HashMap[String, PackageFileInfo] = { + private lazy val cachedPackages: util.HashMap[String, PackageFileInfo] = val packages = util.HashMap[String, PackageFileInfo]() def getSubpackages(dir: AbstractFile): List[AbstractFile] = @@ -102,7 +97,7 @@ object ZipAndJarClassPathFactory extends ZipAndJarFileLookupFactory { @tailrec def traverse(packagePrefix: String, filesForPrefix: List[AbstractFile], - subpackagesQueue: collection.mutable.Queue[PackageInfo]): Unit = filesForPrefix match { + subpackagesQueue: collection.mutable.Queue[PackageInfo]): Unit = filesForPrefix match case pkgFile :: remainingFiles => val subpackages = getSubpackages(pkgFile) val fullPkgName = packagePrefix + pkgFile.name @@ -114,56 +109,48 @@ object ZipAndJarClassPathFactory extends ZipAndJarFileLookupFactory { val PackageInfo(packagePrefix, filesForPrefix) = subpackagesQueue.dequeue() traverse(packagePrefix, filesForPrefix, subpackagesQueue) case _ => - } val subpackages = getSubpackages(file) packages(ClassPath.RootPackage) = PackageFileInfo(file, subpackages) traverse(ClassPath.RootPackage, subpackages, collection.mutable.Queue()) packages - } - override private[dotty] def packages(inPackage: PackageName): Seq[PackageEntry] = cachedPackages.get(inPackage.dottedString) match { + override private[dotty] def packages(inPackage: PackageName): Seq[PackageEntry] = cachedPackages.get(inPackage.dottedString) match case None => Seq.empty case Some(PackageFileInfo(_, subpackages)) => subpackages.map(packageFile => PackageEntryImpl(inPackage.entryName(packageFile.name))) - } - override private[dotty] def classes(inPackage: PackageName): Seq[ClassFileEntry] = cachedPackages.get(inPackage.dottedString) match { + override private[dotty] def classes(inPackage: PackageName): Seq[ClassFileEntry] = cachedPackages.get(inPackage.dottedString) match case None => Seq.empty case Some(PackageFileInfo(pkg, _)) => (for (file <- pkg if file.isClass) yield 
ClassFileEntryImpl(file)).toSeq - } override private[dotty] def hasPackage(pkg: PackageName) = cachedPackages.contains(pkg.dottedString) override private[dotty] def list(inPackage: PackageName): ClassPathEntries = ClassPathEntries(packages(inPackage), classes(inPackage)) - } - private object ManifestResourcesClassPath { + private object ManifestResourcesClassPath: case class PackageFileInfo(packageFile: AbstractFile, subpackages: Seq[AbstractFile]) case class PackageInfo(packageName: String, subpackages: List[AbstractFile]) - } override protected def createForZipFile(zipFile: AbstractFile, release: Option[String]): ClassPath = if (zipFile.file == null) createWithoutUnderlyingFile(zipFile) else ZipArchiveClassPath(zipFile.file, release) - private def createWithoutUnderlyingFile(zipFile: AbstractFile) = zipFile match { + private def createWithoutUnderlyingFile(zipFile: AbstractFile) = zipFile match case manifestRes: ManifestResources => ManifestResourcesClassPath(manifestRes) case _ => val errorMsg = s"Abstract files which don't have an underlying file and are not ManifestResources are not supported. There was $zipFile" throw new IllegalArgumentException(errorMsg) - } -} /** * Manages creation of classpath for source files placed in zip and jar files. * It should be the only way of creating them as it provides caching. 
*/ -object ZipAndJarSourcePathFactory extends ZipAndJarFileLookupFactory { +object ZipAndJarSourcePathFactory extends ZipAndJarFileLookupFactory: private case class ZipArchiveSourcePath(zipFile: File) extends ZipArchiveFileLookup[SourceFileEntryImpl] - with NoClassPaths { + with NoClassPaths: def release: Option[String] = None @@ -173,33 +160,27 @@ object ZipAndJarSourcePathFactory extends ZipAndJarFileLookupFactory { override protected def createFileEntry(file: FileZipArchive#Entry): SourceFileEntryImpl = SourceFileEntryImpl(file) override protected def isRequiredFileType(file: AbstractFile): Boolean = file.isScalaOrJavaSource - } override protected def createForZipFile(zipFile: AbstractFile, release: Option[String]): ClassPath = ZipArchiveSourcePath(zipFile.file) -} -final class FileBasedCache[T] { +final class FileBasedCache[T]: private case class Stamp(lastModified: FileTime, fileKey: Object) private val cache = collection.mutable.Map.empty[java.nio.file.Path, (Stamp, T)] - def getOrCreate(path: java.nio.file.Path, create: () => T): T = cache.synchronized { + def getOrCreate(path: java.nio.file.Path, create: () => T): T = cache.synchronized: val attrs = Files.readAttributes(path, classOf[BasicFileAttributes]) val lastModified = attrs.lastModifiedTime() // only null on some platforms, but that's okay, we just use the last modified timestamp as our stamp val fileKey = attrs.fileKey() val stamp = Stamp(lastModified, fileKey) - cache.get(path) match { + cache.get(path) match case Some((cachedStamp, cached)) if cachedStamp == stamp => cached case _ => val value = create() cache.put(path, (stamp, value)) value - } - } - def clear(): Unit = cache.synchronized { + def clear(): Unit = cache.synchronized: // TODO support closing // cache.valuesIterator.foreach(_.close()) cache.clear() - } -} diff --git a/compiler/src/dotty/tools/dotc/classpath/ZipArchiveFileLookup.scala b/compiler/src/dotty/tools/dotc/classpath/ZipArchiveFileLookup.scala index e241feee8244..807ab9284be6 
100644 --- a/compiler/src/dotty/tools/dotc/classpath/ZipArchiveFileLookup.scala +++ b/compiler/src/dotty/tools/dotc/classpath/ZipArchiveFileLookup.scala @@ -17,7 +17,7 @@ import dotty.tools.io.{EfficientClassPath, ClassRepresentation} * It provides common logic for classes handling class and source files. * It's aware of things like e.g. META-INF directory which is correctly skipped. */ -trait ZipArchiveFileLookup[FileEntryType <: ClassRepresentation] extends EfficientClassPath { +trait ZipArchiveFileLookup[FileEntryType <: ClassRepresentation] extends EfficientClassPath: val zipFile: File def release: Option[String] @@ -28,13 +28,12 @@ trait ZipArchiveFileLookup[FileEntryType <: ClassRepresentation] extends Efficie private val archive = new FileZipArchive(zipFile.toPath, release) - override private[dotty] def packages(inPackage: PackageName): Seq[PackageEntry] = { + override private[dotty] def packages(inPackage: PackageName): Seq[PackageEntry] = for { dirEntry <- findDirEntry(inPackage).toSeq entry <- dirEntry.iterator if entry.isPackage } yield PackageEntryImpl(inPackage.entryName(entry.name)) - } protected def files(inPackage: PackageName): Seq[FileEntryType] = for { @@ -53,20 +52,17 @@ trait ZipArchiveFileLookup[FileEntryType <: ClassRepresentation] extends Efficie override def hasPackage(pkg: PackageName) = findDirEntry(pkg).isDefined def list(inPackage: PackageName, onPackageEntry: PackageEntry => Unit, onClassesAndSources: ClassRepresentation => Unit): Unit = - findDirEntry(inPackage) match { + findDirEntry(inPackage) match case Some(dirEntry) => - for (entry <- dirEntry.iterator) { + for (entry <- dirEntry.iterator) if (entry.isPackage) onPackageEntry(PackageEntryImpl(inPackage.entryName(entry.name))) else if (isRequiredFileType(entry)) onClassesAndSources(createFileEntry(entry)) - } case None => - } private def findDirEntry(pkg: PackageName): Option[archive.DirEntry] = archive.allDirs.get(pkg.dirPathTrailingSlashJar) protected def createFileEntry(file: 
FileZipArchive#Entry): FileEntryType protected def isRequiredFileType(file: AbstractFile): Boolean -} diff --git a/compiler/src/dotty/tools/dotc/config/CliCommand.scala b/compiler/src/dotty/tools/dotc/config/CliCommand.scala index 914df040fbf7..ba79cabe22be 100644 --- a/compiler/src/dotty/tools/dotc/config/CliCommand.scala +++ b/compiler/src/dotty/tools/dotc/config/CliCommand.scala @@ -44,10 +44,9 @@ trait CliCommand: def distill(args: Array[String], sg: Settings.SettingGroup)(ss: SettingsState = sg.defaultState)(using Context): ArgsSummary = // expand out @filename to the contents of that filename - def expandedArguments = args.toList flatMap { + def expandedArguments = args.toList flatMap: case x if x startsWith "@" => CommandLineParser.expandArg(x) case x => List(x) - } sg.processArguments(expandedArguments, processAll = true, settingsState = ss) end distill @@ -94,10 +93,9 @@ trait CliCommand: protected def shortHelp(s: Setting[?])(using settings: ConcreteSettings)(using SettingsState): String = s.description.linesIterator.next() protected def isHelping(s: Setting[?])(using settings: ConcreteSettings)(using SettingsState): Boolean = - cond(s.value) { + cond(s.value): case ss: List[?] if s.isMultivalue => ss.contains("help") case s: String => "help" == s - } /** Messages explaining usage and options */ protected def usageMessage(using settings: ConcreteSettings)(using SettingsState) = diff --git a/compiler/src/dotty/tools/dotc/config/Config.scala b/compiler/src/dotty/tools/dotc/config/Config.scala index 247fa28efbda..072a75d905bc 100644 --- a/compiler/src/dotty/tools/dotc/config/Config.scala +++ b/compiler/src/dotty/tools/dotc/config/Config.scala @@ -1,6 +1,6 @@ package dotty.tools.dotc.config -object Config { +object Config: inline val cacheMembersNamed = true inline val cacheAsSeenFrom = true @@ -246,4 +246,3 @@ object Config { * cases, though. 
*/ inline val ccAllowUnsoundMaps = false -} diff --git a/compiler/src/dotty/tools/dotc/config/JavaPlatform.scala b/compiler/src/dotty/tools/dotc/config/JavaPlatform.scala index 2b2f35e49451..83cf0f09e8ad 100644 --- a/compiler/src/dotty/tools/dotc/config/JavaPlatform.scala +++ b/compiler/src/dotty/tools/dotc/config/JavaPlatform.scala @@ -9,16 +9,15 @@ import Symbols._, Types._, Contexts._, StdNames._ import Flags._ import transform.ExplicitOuter, transform.SymUtils._ -class JavaPlatform extends Platform { +class JavaPlatform extends Platform: private var currentClassPath: Option[ClassPath] = None - def classPath(using Context): ClassPath = { + def classPath(using Context): ClassPath = if (currentClassPath.isEmpty) currentClassPath = Some(new PathResolver().result) val cp = currentClassPath.get cp - } // The given symbol is a method with the right name and signature to be a runnable java program. def isMainMethod(sym: Symbol)(using Context): Boolean = @@ -28,12 +27,11 @@ class JavaPlatform extends Platform { }) /** Update classpath with a substituted subentry */ - def updateClassPath(subst: Map[ClassPath, ClassPath]): Unit = currentClassPath.get match { + def updateClassPath(subst: Map[ClassPath, ClassPath]): Unit = currentClassPath.get match case AggregateClassPath(entries) => currentClassPath = Some(AggregateClassPath(entries map (e => subst.getOrElse(e, e)))) case cp: ClassPath => currentClassPath = Some(subst.getOrElse(cp, cp)) - } def rootLoader(root: TermSymbol)(using Context): SymbolLoader = new SymbolLoaders.PackageLoader(root, classPath) @@ -50,7 +48,7 @@ class JavaPlatform extends Platform { * to anything but other booleans, but it should be present in * case this is put to other uses. 
*/ - def isMaybeBoxed(sym: ClassSymbol)(using Context): Boolean = { + def isMaybeBoxed(sym: ClassSymbol)(using Context): Boolean = val d = defn import d._ (sym == ObjectClass) || @@ -59,11 +57,9 @@ class JavaPlatform extends Platform { (sym derivesFrom BoxedNumberClass) || (sym derivesFrom BoxedCharClass) || (sym derivesFrom BoxedBooleanClass) - } def shouldReceiveJavaSerializationMethods(sym: ClassSymbol)(using Context): Boolean = true def newClassLoader(bin: AbstractFile)(using Context): SymbolLoader = new ClassfileLoader(bin) -} diff --git a/compiler/src/dotty/tools/dotc/config/OutputDirs.scala b/compiler/src/dotty/tools/dotc/config/OutputDirs.scala index 0411c5604768..cf5f8d04517a 100644 --- a/compiler/src/dotty/tools/dotc/config/OutputDirs.scala +++ b/compiler/src/dotty/tools/dotc/config/OutputDirs.scala @@ -12,7 +12,7 @@ import io._ * single output location, but tools may use this functionality * to set output location per source directory. */ -class OutputDirs { +class OutputDirs: /** Pairs of source directory - destination directory. */ private var outputDirs: List[(AbstractFile, AbstractFile)] = Nil @@ -41,10 +41,9 @@ class OutputDirs { /** Set the single output directory. From now on, all files will * be dumped in there, regardless of previous calls to 'add'. */ - def setSingleOutput(outDir: String): Unit = { + def setSingleOutput(outDir: String): Unit = val dst = AbstractFile.getDirectory(outDir) setSingleOutput(checkDir(dst, outDir, true)) - } def getSingleOutput: Option[AbstractFile] = singleOutDir @@ -54,31 +53,27 @@ class OutputDirs { def setSingleOutput(dir: AbstractFile): Unit = singleOutDir = Some(dir) - def add(src: AbstractFile, dst: AbstractFile): Unit = { + def add(src: AbstractFile, dst: AbstractFile): Unit = singleOutDir = None outputDirs ::= ((src, dst)) - } /** Return the list of source-destination directory pairs. */ def outputs: List[(AbstractFile, AbstractFile)] = outputDirs /** Return the output directory for the given file. 
*/ - def outputDirFor(src: AbstractFile): AbstractFile = { + def outputDirFor(src: AbstractFile): AbstractFile = def isBelow(srcDir: AbstractFile, outDir: AbstractFile) = src.path.startsWith(srcDir.path) - singleOutDir match { + singleOutDir match case Some(d) => d case None => - (outputs find (isBelow _).tupled) match { + (outputs find (isBelow _).tupled) match case Some((_, d)) => d case _ => throw new FatalError("Could not find an output directory for " + src.path + " in " + outputs) - } - } - } /** Return the source file path(s) which correspond to the given * classfile path and SourceFile attribute value, subject to the @@ -97,21 +92,16 @@ class OutputDirs { * output directory there will be two or more candidate source file * paths. */ - def srcFilesFor(classFile: AbstractFile, srcPath: String): List[AbstractFile] = { + def srcFilesFor(classFile: AbstractFile, srcPath: String): List[AbstractFile] = def isBelow(srcDir: AbstractFile, outDir: AbstractFile) = classFile.path.startsWith(outDir.path) - singleOutDir match { + singleOutDir match case Some(d) => - d match { + d match case _: VirtualDirectory | _: io.ZipArchive => Nil case _ => List(d.lookupPathUnchecked(srcPath, false)) - } case None => - (outputs filter (isBelow _).tupled) match { + (outputs filter (isBelow _).tupled) match case Nil => Nil case matches => matches.map(_._1.lookupPathUnchecked(srcPath, false)) - } - } - } -} diff --git a/compiler/src/dotty/tools/dotc/config/PathResolver.scala b/compiler/src/dotty/tools/dotc/config/PathResolver.scala index 8b4eedb0e9d2..1283577ae264 100644 --- a/compiler/src/dotty/tools/dotc/config/PathResolver.scala +++ b/compiler/src/dotty/tools/dotc/config/PathResolver.scala @@ -13,7 +13,7 @@ import core.Contexts._ import Settings._ import dotty.tools.io.File -object PathResolver { +object PathResolver: // Imports property/environment functions which suppress // security exceptions. 
@@ -27,15 +27,14 @@ object PathResolver { /** pretty print class path */ - def ppcp(s: String): String = split(s) match { + def ppcp(s: String): String = split(s) match case Nil => "" case Seq(x) => x case xs => xs.map("\n" + _).mkString - } /** Values found solely by inspecting environment or property variables. */ - object Environment { + object Environment: private def searchForBootClasspath = ( systemProperties find (_._1 endsWith ".boot.class.path") map (_._2) getOrElse "" ) @@ -65,12 +64,11 @@ object PathResolver { | javaUserClassPath = ${ppcp(javaUserClassPath)} | scalaExtDirs = ${ppcp(scalaExtDirs)} |}""".trim.stripMargin - } /** Default values based on those in Environment as interpreted according * to the path resolution specification. */ - object Defaults { + object Defaults: def scalaSourcePath: String = Environment.sourcePathEnv def javaBootClassPath: String = Environment.javaBootClassPath def javaUserClassPath: String = Environment.javaUserClassPath @@ -127,67 +125,57 @@ object PathResolver { scalaLibDirFound, scalaLibFound, ppcp(scalaBootClassPath), ppcp(scalaPluginPath) ) - } - def fromPathString(path: String)(using Context): ClassPath = { + def fromPathString(path: String)(using Context): ClassPath = val settings = ctx.settings.classpath.update(path) - inContext(ctx.fresh.setSettings(settings)) { + inContext(ctx.fresh.setSettings(settings)): new PathResolver().result - } - } /** Show values in Environment and Defaults when no argument is provided. * Otherwise, show values in Calculated as if those options had been given * to a scala runner. 
*/ def main(args: Array[String]): Unit = - if (args.isEmpty) { + if (args.isEmpty) println(Environment) println(Defaults) - } - else inContext(ContextBase().initialCtx) { + else inContext(ContextBase().initialCtx): val ArgsSummary(sstate, rest, errors, warnings) = ctx.settings.processArguments(args.toList, true, ctx.settingsState) errors.foreach(println) - val pr = inContext(ctx.fresh.setSettings(sstate)) { + val pr = inContext(ctx.fresh.setSettings(sstate)): new PathResolver() - } println(" COMMAND: 'scala %s'".format(args.mkString(" "))) println("RESIDUAL: 'scala %s'\n".format(rest.mkString(" "))) - pr.result match { + pr.result match case cp: AggregateClassPath => println(s"ClassPath has ${cp.aggregates.size} entries and results in:\n${cp.asClassPathStrings}") - } - } -} import PathResolver.{Defaults, ppcp} -class PathResolver(using c: Context) { +class PathResolver(using c: Context): import c.base.settings private val classPathFactory = new ClassPathFactory private def cmdLineOrElse(name: String, alt: String) = - commandLineFor(name) match { + commandLineFor(name) match case Some("") | None => alt case Some(x) => x - } - private def commandLineFor(s: String): Option[String] = condOpt(s) { + private def commandLineFor(s: String): Option[String] = condOpt(s): case "javabootclasspath" => settings.javabootclasspath.value case "javaextdirs" => settings.javaextdirs.value case "bootclasspath" => settings.bootclasspath.value case "extdirs" => settings.extdirs.value case "classpath" | "cp" => settings.classpath.value case "sourcepath" => settings.sourcepath.value - } /** Calculated values based on any given command line options, falling back on * those in Defaults. 
*/ - object Calculated { + object Calculated: def scalaHome: String = Defaults.scalaHome def useJavaClassPath: Boolean = settings.usejavacp.value || Defaults.useJavaClassPath def javaBootClassPath: String = cmdLineOrElse("javabootclasspath", Defaults.javaBootClassPath) @@ -245,14 +233,13 @@ class PathResolver(using c: Context) { ppcp(scalaBootClassPath), ppcp(scalaExtDirs), ppcp(userClassPath), ppcp(sourcePath) ) - } def containers: List[ClassPath] = Calculated.containers - lazy val result: ClassPath = { + lazy val result: ClassPath = val cp = AggregateClassPath(containers.toIndexedSeq) - if (settings.YlogClasspath.value) { + if (settings.YlogClasspath.value) Console.println("Classpath built from " + settings.toConciseString(ctx.settingsState)) Console.println("Defaults: " + PathResolver.Defaults) Console.println("Calculated: " + Calculated) @@ -260,9 +247,6 @@ class PathResolver(using c: Context) { val xs = (Calculated.basis drop 2).flatten.distinct println("After java boot/extdirs classpath has %d entries:" format xs.size) xs foreach (x => println(" " + x)) - } cp - } def asURLs: Seq[java.net.URL] = result.asURLs -} diff --git a/compiler/src/dotty/tools/dotc/config/Platform.scala b/compiler/src/dotty/tools/dotc/config/Platform.scala index 0faacf1bcebb..b82e4d14b18f 100644 --- a/compiler/src/dotty/tools/dotc/config/Platform.scala +++ b/compiler/src/dotty/tools/dotc/config/Platform.scala @@ -10,7 +10,7 @@ import core.Flags.Module /** The platform dependent pieces of Global. */ -abstract class Platform { +abstract class Platform: /** The root symbol loader. 
*/ def rootLoader(root: TermSymbol)(using Context): SymbolLoader @@ -43,4 +43,3 @@ abstract class Platform { final def hasMainMethod(sym: Symbol)(using Context): Boolean = sym.info.member(nme.main).hasAltWith(d => isMainMethod(d.symbol) && (sym.is(Module) || d.symbol.isStatic)) -} diff --git a/compiler/src/dotty/tools/dotc/config/Printers.scala b/compiler/src/dotty/tools/dotc/config/Printers.scala index 63d616e1ce3d..a7a2604fe61a 100644 --- a/compiler/src/dotty/tools/dotc/config/Printers.scala +++ b/compiler/src/dotty/tools/dotc/config/Printers.scala @@ -1,14 +1,12 @@ package dotty.tools.dotc.config -object Printers { +object Printers: - class Printer { + class Printer: def println(msg: => String): Unit = System.out.nn.println(msg) - } - object noPrinter extends Printer { + object noPrinter extends Printer: inline override def println(msg: => String): Unit = () - } val default = new Printer @@ -50,4 +48,3 @@ object Printers { val typr = noPrinter val unapp = noPrinter val variances = noPrinter -} diff --git a/compiler/src/dotty/tools/dotc/config/Properties.scala b/compiler/src/dotty/tools/dotc/config/Properties.scala index 1e9cc82112af..91643c39d393 100644 --- a/compiler/src/dotty/tools/dotc/config/Properties.scala +++ b/compiler/src/dotty/tools/dotc/config/Properties.scala @@ -11,16 +11,15 @@ import java.util.jar.Attributes.{ Name => AttributeName } import java.nio.charset.StandardCharsets /** Loads `library.properties` from the jar. */ -object Properties extends PropertiesTrait { +object Properties extends PropertiesTrait: protected def propCategory: String = "compiler" protected def pickJarBasedOn: Class[PropertiesTrait] = classOf[PropertiesTrait] /** Scala manifest attributes. */ @sharable val ScalaCompilerVersion: AttributeName = new AttributeName("Scala-Compiler-Version") -} -trait PropertiesTrait { +trait PropertiesTrait: protected def propCategory: String // specializes the remainder of the values protected def pickJarBasedOn: Class[?] 
// props file comes from jar containing this @@ -28,14 +27,13 @@ trait PropertiesTrait { protected val propFilename: String = "/" + propCategory + ".properties" /** The loaded properties */ - @sharable protected lazy val scalaProps: java.util.Properties = { + @sharable protected lazy val scalaProps: java.util.Properties = val props = new java.util.Properties val stream = pickJarBasedOn getResourceAsStream propFilename if (stream ne null) quietlyDispose(props load stream, stream.close) props - } private def quietlyDispose(action: => Unit, disposal: => Unit) = try { action } @@ -69,7 +67,7 @@ trait PropertiesTrait { /** The version number of the jar this was loaded from, * or `"(unknown)"` if it cannot be determined. */ - val simpleVersionString: String = { + val simpleVersionString: String = val v = scalaPropOrElse("version.number", "(unknown)") v + ( if (v.contains("SNAPSHOT") || v.contains("NIGHTLY")) @@ -77,7 +75,6 @@ trait PropertiesTrait { else "" ) - } /** The version number of the jar this was loaded from plus `"version "` prefix, * or `"version (unknown)"` if it cannot be determined. @@ -139,4 +136,3 @@ trait PropertiesTrait { def versionMsg: String = "Scala %s %s -- %s".format(propCategory, versionString, copyrightString) def scalaCmd: String = if (isWin) "scala.bat" else "scala" def scalacCmd: String = if (isWin) "scalac.bat" else "scalac" -} diff --git a/compiler/src/dotty/tools/dotc/config/SJSPlatform.scala b/compiler/src/dotty/tools/dotc/config/SJSPlatform.scala index 0275e0d6a227..1e750c86b48d 100644 --- a/compiler/src/dotty/tools/dotc/config/SJSPlatform.scala +++ b/compiler/src/dotty/tools/dotc/config/SJSPlatform.scala @@ -6,13 +6,12 @@ import Symbols._ import dotty.tools.backend.sjs.JSDefinitions -object SJSPlatform { +object SJSPlatform: /** The `SJSPlatform` for the current context. 
*/ def sjsPlatform(using Context): SJSPlatform = ctx.platform.asInstanceOf[SJSPlatform] -} -class SJSPlatform()(using Context) extends JavaPlatform { +class SJSPlatform()(using Context) extends JavaPlatform: /** Scala.js-specific definitions. */ val jsDefinitions: JSDefinitions = new JSDefinitions() @@ -32,4 +31,3 @@ class SJSPlatform()(using Context) extends JavaPlatform { */ override def shouldReceiveJavaSerializationMethods(sym: ClassSymbol)(using Context): Boolean = !sym.isSubClass(jsDefinitions.JSAnyClass) -} diff --git a/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala b/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala index 0ddde23dc39f..1ff43b435f3b 100644 --- a/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala +++ b/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala @@ -37,7 +37,7 @@ object ScalaSettings: def defaultClasspath: String = sys.env.getOrElse("CLASSPATH", ".") - def defaultPageWidth: Int = { + def defaultPageWidth: Int = val defaultWidth = 80 val columnsVar = System.getenv("COLUMNS") if columnsVar != null then columnsVar.toInt @@ -47,7 +47,6 @@ object ScalaSettings: ansiconVar.substring(0, ansiconVar.indexOf("x")).toInt else defaultWidth else defaultWidth - } trait AllScalaSettings extends CommonScalaSettings, PluginSettings, VerboseSettings, WarningSettings, XSettings, YSettings: self: SettingGroup => @@ -300,10 +299,9 @@ private sealed trait XSettings: choices = List("true", "junit", "false"), default = "true") - object mixinForwarderChoices { + object mixinForwarderChoices: def isTruthy(using Context) = XmixinForceForwarders.value == "true" def isAtLeastJunit(using Context) = isTruthy || XmixinForceForwarders.value == "junit" - } val XmacroSettings: Setting[List[String]] = MultiStringSetting("-Xmacro-settings", "setting1,setting2,..settingN", "List of settings which exposed to the macros") end XSettings diff --git a/compiler/src/dotty/tools/dotc/config/ScalaVersion.scala 
b/compiler/src/dotty/tools/dotc/config/ScalaVersion.scala index 7fdf57478f1a..cdf60aafaeee 100644 --- a/compiler/src/dotty/tools/dotc/config/ScalaVersion.scala +++ b/compiler/src/dotty/tools/dotc/config/ScalaVersion.scala @@ -12,21 +12,18 @@ import scala.util.{Try, Success, Failure} * Represents a single Scala version in a manner that * supports easy comparison and sorting. */ -sealed abstract class ScalaVersion extends Ordered[ScalaVersion] { +sealed abstract class ScalaVersion extends Ordered[ScalaVersion]: def unparse: String -} /** * A scala version that sorts higher than all actual versions */ -@sharable case object NoScalaVersion extends ScalaVersion { +@sharable case object NoScalaVersion extends ScalaVersion: def unparse: String = "none" - def compare(that: ScalaVersion): Int = that match { + def compare(that: ScalaVersion): Int = that match case NoScalaVersion => 0 case _ => 1 - } -} /** * A specific Scala version, not one of the magic min/max versions. An SpecificScalaVersion @@ -34,10 +31,10 @@ sealed abstract class ScalaVersion extends Ordered[ScalaVersion] { * final, release candidate, milestone, and development builds. 
The build argument is used * to segregate builds */ -case class SpecificScalaVersion(major: Int, minor: Int, rev: Int, build: ScalaBuild) extends ScalaVersion { +case class SpecificScalaVersion(major: Int, minor: Int, rev: Int, build: ScalaBuild) extends ScalaVersion: def unparse: String = s"${major}.${minor}.${rev}.${build.unparse}" - def compare(that: ScalaVersion): Int = that match { + def compare(that: ScalaVersion): Int = that match case SpecificScalaVersion(thatMajor, thatMinor, thatRev, thatBuild) => // this could be done more cleanly by importing scala.math.Ordering.Implicits, but we have to do these // comparisons a lot so I'm using brute force direct style code @@ -50,31 +47,27 @@ case class SpecificScalaVersion(major: Int, minor: Int, rev: Int, build: ScalaBu else build compare thatBuild case AnyScalaVersion => 1 case NoScalaVersion => -1 - } -} /** * A Scala version that sorts lower than all actual versions */ -@sharable case object AnyScalaVersion extends ScalaVersion { +@sharable case object AnyScalaVersion extends ScalaVersion: def unparse: String = "any" - def compare(that: ScalaVersion): Int = that match { + def compare(that: ScalaVersion): Int = that match case AnyScalaVersion => 0 case _ => -1 - } -} /** * Methods for parsing ScalaVersions */ -@sharable object ScalaVersion { +@sharable object ScalaVersion: private val dot = "\\." private val dash = "\\-" private def not(s:String) = s"[^${s}]" private val R = s"((${not(dot)}*)(${dot}(${not(dot)}*)(${dot}(${not(dash)}*)(${dash}(.*))?)?)?)".r - def parse(versionString : String): Try[ScalaVersion] = { + def parse(versionString : String): Try[ScalaVersion] = def failure = Failure(new NumberFormatException( s"There was a problem parsing ${versionString}. " + "Versions should be in the form major[.minor[.revision]] " + @@ -82,59 +75,52 @@ case class SpecificScalaVersion(major: Int, minor: Int, rev: Int, build: ScalaBu "The minor and revision parts are optional." 
)) - def toInt(s: String) = s match { + def toInt(s: String) = s match case null | "" => 0 case _ => s.toInt - } def isInt(s: String) = Try(toInt(s)).isSuccess import ScalaBuild._ - def toBuild(s: String) = s match { + def toBuild(s: String) = s match case null | "FINAL" => Final case s if (s.toUpperCase.startsWith("RC") && isInt(s.substring(2))) => RC(toInt(s.substring(2))) case s if (s.toUpperCase.startsWith("M") && isInt(s.substring(1))) => Milestone(toInt(s.substring(1))) case _ => Development(s) - } - try versionString match { + try versionString match case "" | "any" => Success(AnyScalaVersion) case "none" => Success(NoScalaVersion) case R(_, majorS, _, minorS, _, revS, _, buildS) => Success(SpecificScalaVersion(toInt(majorS), toInt(minorS), toInt(revS), toBuild(buildS))) case _ => failure - } - catch { + catch case e: NumberFormatException => failure - } - } /** * The version of the compiler running now */ val current: ScalaVersion = parse(util.Properties.versionNumberString).get -} /** * Represents the data after the dash in major.minor.rev-build */ -abstract class ScalaBuild extends Ordered[ScalaBuild] { +abstract class ScalaBuild extends Ordered[ScalaBuild]: /** * Return a version of this build information that can be parsed back into the * same ScalaBuild */ def unparse: String -} -object ScalaBuild { +object ScalaBuild: /** A development, test, nightly, snapshot or other "unofficial" build */ - case class Development(id: String) extends ScalaBuild { + case class Development(id: String) extends ScalaBuild: def unparse: String = s"-${id}" - def compare(that: ScalaBuild): Int = that match { + def compare(that: ScalaBuild): Int = that match // sorting two development builds based on id is reasonably valid for two versions created with the same schema // otherwise it's not correct, but since it's impossible to put a total ordering on development build versions // this is a pragmatic compromise @@ -142,47 +128,38 @@ object ScalaBuild { // assume a development 
build is newer than anything else, that's not really true, but good luck // mapping development build versions to other build types case _ => 1 - } - } /** A final build */ - case object Final extends ScalaBuild { + case object Final extends ScalaBuild: def unparse: String = "" - def compare(that: ScalaBuild): Int = that match { + def compare(that: ScalaBuild): Int = that match case Final => 0 // a final is newer than anything other than a development build or another final case Development(_) => -1 case _ => 1 - } - } /** A candidate for final release */ - case class RC(n: Int) extends ScalaBuild { + case class RC(n: Int) extends ScalaBuild: def unparse: String = s"-RC${n}" - def compare(that: ScalaBuild): Int = that match { + def compare(that: ScalaBuild): Int = that match // compare two rcs based on their RC numbers case RC(thatN) => n - thatN // an rc is older than anything other than a milestone or another rc case Milestone(_) => 1 case _ => -1 - } - } /** An intermediate release */ - case class Milestone(n: Int) extends ScalaBuild { + case class Milestone(n: Int) extends ScalaBuild: def unparse: String = s"-M${n}" - def compare(that: ScalaBuild): Int = that match { + def compare(that: ScalaBuild): Int = that match // compare two milestones based on their milestone numbers case Milestone(thatN) => n - thatN // a milestone is older than anything other than another milestone case _ => -1 - } - } -} diff --git a/compiler/src/dotty/tools/dotc/config/Settings.scala b/compiler/src/dotty/tools/dotc/config/Settings.scala index 34e5582e8a91..b3bfeb8d0af3 100644 --- a/compiler/src/dotty/tools/dotc/config/Settings.scala +++ b/compiler/src/dotty/tools/dotc/config/Settings.scala @@ -44,14 +44,13 @@ object Settings: sstate: SettingsState, arguments: List[String], errors: List[String], - warnings: List[String]) { + warnings: List[String]): def fail(msg: String): Settings.ArgsSummary = ArgsSummary(sstate, arguments.tail, errors :+ msg, warnings) def warn(msg: String): 
Settings.ArgsSummary = ArgsSummary(sstate, arguments.tail, errors, warnings :+ msg) - } case class Setting[T: ClassTag] private[Settings] ( name: String, @@ -62,7 +61,7 @@ object Settings: prefix: String = "", aliases: List[String] = Nil, depends: List[(Setting[?], Any)] = Nil, - propertyClass: Option[Class[?]] = None)(private[Settings] val idx: Int) { + propertyClass: Option[Class[?]] = None)(private[Settings] val idx: Int): private var changed: Boolean = false @@ -77,14 +76,13 @@ object Settings: def isMultivalue: Boolean = summon[ClassTag[T]] == ListTag def legalChoices: String = - choices match { + choices match case Some(xs) if xs.isEmpty => "" case Some(r: Range) => s"${r.head}..${r.last}" case Some(xs) => xs.mkString(", ") case None => "" - } - def tryToSet(state: ArgsSummary): ArgsSummary = { + def tryToSet(state: ArgsSummary): ArgsSummary = val ArgsSummary(sstate, arg :: args, errors, warnings) = state: @unchecked def update(value: Any, args: List[String]): ArgsSummary = var dangers = warnings @@ -132,7 +130,7 @@ object Settings: catch case _: NumberFormatException => fail(s"$argValue is not an integer argument for $name", args) - def doSet(argRest: String) = ((summon[ClassTag[T]], args): @unchecked) match { + def doSet(argRest: String) = ((summon[ClassTag[T]], args): @unchecked) match case (BooleanTag, _) => setBoolean(argRest, args) case (OptionTag, _) => @@ -143,8 +141,8 @@ object Settings: val strings = argRest.split(",").toList choices match case Some(valid) => strings.filterNot(valid.contains) match - case Nil => update(strings, args) - case invalid => fail(s"invalid choice(s) for $name: ${invalid.mkString(",")}", args) + case Nil => update(strings, args) + case invalid => fail(s"invalid choice(s) for $name: ${invalid.mkString(",")}", args) case _ => update(strings, args) case (StringTag, _) if argRest.nonEmpty || choices.exists(_.contains("")) => setString(argRest, args) @@ -156,22 +154,19 @@ object Settings: val isJar = path.extension == "jar" if 
(!isJar && !path.isDirectory) fail(s"'$arg' does not exist or is not a directory or .jar file", args) - else { + else val output = if (isJar) JarArchive.create(path) else new PlainDirectory(path) update(output, args) - } case (IntTag, args) if argRest.nonEmpty => setInt(argRest, args) case (IntTag, arg2 :: args2) => setInt(arg2, args2) case (VersionTag, _) => - ScalaVersion.parse(argRest) match { + ScalaVersion.parse(argRest) match case Success(v) => update(v, args) case Failure(ex) => fail(ex.getMessage, args) - } case (_, Nil) => missingArg - } def matches(argName: String) = (name :: aliases).exists(_ == argName) @@ -181,8 +176,6 @@ object Settings: doSet(arg.dropWhile(_ != ':').drop(1)) else state - } - } object Setting: extension [T](setting: Setting[T]) @@ -203,7 +196,7 @@ object Settings: s"\n- $name${if description.isEmpty() then "" else s" :\n\t${description.replace("\n","\n\t")}"}" end Setting - class SettingGroup { + class SettingGroup: private val _allSettings = new ArrayBuffer[Setting[?]] def allSettings: Seq[Setting[?]] = _allSettings.toSeq @@ -266,11 +259,10 @@ object Settings: def processArguments(arguments: List[String], processAll: Boolean, settingsState: SettingsState = defaultState): ArgsSummary = processArguments(ArgsSummary(settingsState, arguments, Nil, Nil), processAll, Nil) - def publish[T](settingf: Int => Setting[T]): Setting[T] = { + def publish[T](settingf: Int => Setting[T]): Setting[T] = val setting = settingf(_allSettings.length) _allSettings += setting setting - } def BooleanSetting(name: String, descr: String, initialValue: Boolean = false, aliases: List[String] = Nil): Setting[Boolean] = publish(Setting(name, descr, initialValue, aliases = aliases)) @@ -313,5 +305,4 @@ object Settings: def OptionSetting[T: ClassTag](name: String, descr: String, aliases: List[String] = Nil): Setting[Option[T]] = publish(Setting(name, descr, None, propertyClass = Some(summon[ClassTag[T]].runtimeClass), aliases = aliases)) - } end Settings diff --git 
a/compiler/src/dotty/tools/dotc/config/WrappedProperties.scala b/compiler/src/dotty/tools/dotc/config/WrappedProperties.scala index 5b79432a97e7..3bc96885df3b 100644 --- a/compiler/src/dotty/tools/dotc/config/WrappedProperties.scala +++ b/compiler/src/dotty/tools/dotc/config/WrappedProperties.scala @@ -8,7 +8,7 @@ import scala.language.unsafeNulls * Motivated by places like google app engine throwing exceptions * on property lookups. */ -trait WrappedProperties extends PropertiesTrait { +trait WrappedProperties extends PropertiesTrait: def wrap[T](body: => T): Option[T] protected def propCategory: String = "wrapped" @@ -21,22 +21,17 @@ trait WrappedProperties extends PropertiesTrait { override def envOrElse(name: String, alt: String): String = wrap(super.envOrElse(name, alt)) getOrElse alt override def envOrNone(name: String): Option[String] = wrap(super.envOrNone(name)).flatten - def systemProperties: Iterator[(String, String)] = { + def systemProperties: Iterator[(String, String)] = import scala.jdk.CollectionConverters._ wrap(System.getProperties.asScala.iterator) getOrElse Iterator.empty - } -} -object WrappedProperties { - object AccessControl extends WrappedProperties { +object WrappedProperties: + object AccessControl extends WrappedProperties: def wrap[T](body: => T): Option[T] = try Some(body) - catch { + catch // the actual exception we are concerned with is AccessControlException, // but that's deprecated on JDK 17, so catching its superclass is a convenient // way to avoid a deprecation warning case _: SecurityException => None - } - } -} diff --git a/compiler/src/dotty/tools/dotc/core/Annotations.scala b/compiler/src/dotty/tools/dotc/core/Annotations.scala index 202f3eb26e41..6f675f187636 100644 --- a/compiler/src/dotty/tools/dotc/core/Annotations.scala +++ b/compiler/src/dotty/tools/dotc/core/Annotations.scala @@ -10,13 +10,13 @@ import printing.Texts.Text import scala.annotation.internal.sharable -object Annotations { +object Annotations: def 
annotClass(tree: Tree)(using Context) = if (tree.symbol.isConstructor) tree.symbol.owner else tree.tpe.typeSymbol - abstract class Annotation extends Showable { + abstract class Annotation extends Showable: def tree(using Context): Tree def symbol(using Context): Symbol = annotClass(tree) @@ -33,10 +33,9 @@ object Annotations { /** All arguments to this annotation in a single flat list */ def arguments(using Context): List[Tree] = tpd.allArguments(tree) - def argument(i: Int)(using Context): Option[Tree] = { + def argument(i: Int)(using Context): Option[Tree] = val args = arguments if (i < args.length) Some(args(i)) else None - } def argumentConstant(i: Int)(using Context): Option[Constant] = for (case ConstantType(c) <- argument(i) map (_.tpe.widenTermRefExpr.normalized)) yield c @@ -72,12 +71,11 @@ object Annotations { def refersToParamOf(tl: TermLambda)(using Context): Boolean = val args = arguments if args.isEmpty then false - else tree.existsSubTree { + else tree.existsSubTree: case id: Ident => id.tpe.stripped match - case TermParamRef(tl1, _) => tl eq tl1 - case _ => false + case TermParamRef(tl1, _) => tl eq tl1 + case _ => false case _ => false - } /** A string representation of the annotation. Overridden in BodyAnnotation. 
*/ @@ -88,7 +86,7 @@ object Annotations { def sameAnnotation(that: Annotation)(using Context): Boolean = symbol == that.symbol && tree.sameTree(that.tree) - def hasOneOfMetaAnnotation(metaSyms: Set[Symbol], orNoneOf: Set[Symbol] = Set.empty)(using Context): Boolean = atPhaseNoLater(erasurePhase) { + def hasOneOfMetaAnnotation(metaSyms: Set[Symbol], orNoneOf: Set[Symbol] = Set.empty)(using Context): Boolean = atPhaseNoLater(erasurePhase): def go(metaSyms: Set[Symbol]) = def recTp(tp: Type): Boolean = tp.dealiasKeepAnnots match case AnnotatedType(parent, metaAnnot) => metaSyms.exists(metaAnnot.matches) || recTp(parent) @@ -102,21 +100,19 @@ object Annotations { case _ => false metaSyms.exists(symbol.hasAnnotation) || rec(tree) go(metaSyms) || orNoneOf.nonEmpty && !go(orNoneOf) - } /** Operations for hash-consing, can be overridden */ def hash: Int = System.identityHashCode(this) def eql(that: Annotation) = this eq that - } case class ConcreteAnnotation(t: Tree) extends Annotation: def tree(using Context): Tree = t - abstract class LazyAnnotation extends Annotation { + abstract class LazyAnnotation extends Annotation: protected var mySym: Symbol | (Context ?=> Symbol) | Null override def symbol(using parentCtx: Context): Symbol = assert(mySym != null) - mySym match { + mySym match case symFn: (Context ?=> Symbol) @unchecked => mySym = null mySym = atPhaseBeforeTransforms(symFn) @@ -128,23 +124,20 @@ object Annotations { case sym: Symbol if sym.defRunId != parentCtx.runId => mySym = sym.denot.current.symbol case _ => - } mySym.asInstanceOf[Symbol] protected var myTree: Tree | (Context ?=> Tree) | Null def tree(using Context): Tree = assert(myTree != null) - myTree match { + myTree match case treeFn: (Context ?=> Tree) @unchecked => myTree = null myTree = atPhaseBeforeTransforms(treeFn) case _ => - } myTree.asInstanceOf[Tree] override def isEvaluating: Boolean = myTree == null override def isEvaluated: Boolean = myTree.isInstanceOf[Tree @unchecked] - } class 
DeferredSymAndTree(symFn: Context ?=> Symbol, treeFn: Context ?=> Tree) extends LazyAnnotation: @@ -155,43 +148,38 @@ object Annotations { * typically of an inline method. Treated specially in * pickling/unpickling and TypeTreeMaps */ - abstract class BodyAnnotation extends Annotation { + abstract class BodyAnnotation extends Annotation: override def symbol(using Context): ClassSymbol = defn.BodyAnnot override def derivedAnnotation(tree: Tree)(using Context): Annotation = if (tree eq this.tree) this else ConcreteBodyAnnotation(tree) override def arguments(using Context): List[Tree] = Nil override def ensureCompleted(using Context): Unit = () override def toText(printer: Printer): Text = "@Body" - } - class ConcreteBodyAnnotation(body: Tree) extends BodyAnnotation { + class ConcreteBodyAnnotation(body: Tree) extends BodyAnnotation: def tree(using Context): Tree = body - } - abstract class LazyBodyAnnotation extends BodyAnnotation { + abstract class LazyBodyAnnotation extends BodyAnnotation: // Copy-pasted from LazyAnnotation to avoid having to turn it into a trait protected var myTree: Tree | (Context ?=> Tree) | Null def tree(using Context): Tree = assert(myTree != null) - myTree match { + myTree match case treeFn: (Context ?=> Tree) @unchecked => myTree = null myTree = atPhaseBeforeTransforms(treeFn) case _ => - } myTree.asInstanceOf[Tree] override def isEvaluating: Boolean = myTree == null override def isEvaluated: Boolean = myTree.isInstanceOf[Tree @unchecked] - } - object LazyBodyAnnotation { + object LazyBodyAnnotation: def apply(bodyFn: Context ?=> Tree): LazyBodyAnnotation = new LazyBodyAnnotation: protected var myTree: Tree | (Context ?=> Tree) | Null = ctx ?=> bodyFn(using ctx) - } - object Annotation { + object Annotation: def apply(tree: Tree): ConcreteAnnotation = ConcreteAnnotation(tree) @@ -212,71 +200,60 @@ object Annotations { /** Create an annotation where the tree is computed lazily. 
*/ def deferred(sym: Symbol)(treeFn: Context ?=> Tree): Annotation = - new LazyAnnotation { + new LazyAnnotation: protected var myTree: Tree | (Context ?=> Tree) | Null = ctx ?=> treeFn(using ctx) protected var mySym: Symbol | (Context ?=> Symbol) | Null = sym - } /** Create an annotation where the symbol and the tree are computed lazily. */ def deferredSymAndTree(symFn: Context ?=> Symbol)(treeFn: Context ?=> Tree): Annotation = DeferredSymAndTree(symFn, treeFn) /** Extractor for child annotations */ - object Child { + object Child: /** A deferred annotation to the result of a given child computation */ - def later(delayedSym: Context ?=> Symbol, span: Span)(using Context): Annotation = { - def makeChildLater(using Context) = { + def later(delayedSym: Context ?=> Symbol, span: Span)(using Context): Annotation = + def makeChildLater(using Context) = val sym = delayedSym New(defn.ChildAnnot.typeRef.appliedTo(sym.owner.thisType.select(sym.name, sym)), Nil) .withSpan(span) - } deferred(defn.ChildAnnot)(makeChildLater) - } /** A regular, non-deferred Child annotation */ def apply(sym: Symbol, span: Span)(using Context): Annotation = later(sym, span) def unapply(ann: Annotation)(using Context): Option[Symbol] = - if (ann.symbol == defn.ChildAnnot) { + if (ann.symbol == defn.ChildAnnot) val AppliedType(_, (arg: NamedType) :: Nil) = ann.tree.tpe: @unchecked Some(arg.symbol) - } else None - } def makeSourceFile(path: String, span: Span)(using Context): Annotation = apply(defn.SourceFileAnnot, Literal(Constant(path)), span) - } @sharable val EmptyAnnotation = Annotation(EmptyTree) - def ThrowsAnnotation(cls: ClassSymbol)(using Context): Annotation = { + def ThrowsAnnotation(cls: ClassSymbol)(using Context): Annotation = val tref = cls.typeRef Annotation(defn.ThrowsAnnot.typeRef.appliedTo(tref), Ident(tref), cls.span) - } /** Extracts the type of the thrown exception from an annotation. 
* * Supports both "old-style" `@throws(classOf[Exception])` * as well as "new-style" `@throws[Exception]("cause")` annotations. */ - object ThrownException { + object ThrownException: def unapply(a: Annotation)(using Context): Option[Type] = if (a.symbol ne defn.ThrowsAnnot) None - else a.argumentConstant(0) match { + else a.argumentConstant(0) match // old-style: @throws(classOf[Exception]) (which is throws[T](classOf[Exception])) case Some(Constant(tpe: Type)) => Some(tpe) // new-style: @throws[Exception], @throws[Exception]("cause") case _ => - stripApply(a.tree) match { + stripApply(a.tree) match case TypeApply(_, List(tpt)) => Some(tpt.tpe) case _ => None - } - } - } -} diff --git a/compiler/src/dotty/tools/dotc/core/CheckRealizable.scala b/compiler/src/dotty/tools/dotc/core/CheckRealizable.scala index a61701eee2d7..22fb76851dc7 100644 --- a/compiler/src/dotty/tools/dotc/core/CheckRealizable.scala +++ b/compiler/src/dotty/tools/dotc/core/CheckRealizable.scala @@ -10,14 +10,13 @@ import config.SourceVersion.future import config.Feature.sourceVersion /** Realizability status */ -object CheckRealizable { +object CheckRealizable: - sealed abstract class Realizability(val msg: String) { + sealed abstract class Realizability(val msg: String): def andAlso(other: => Realizability): Realizability = if (this == Realizable) other else this def mapError(f: Realizability => Realizability): Realizability = if (this == Realizable) this else f(this) - } object Realizable extends Realizability("") @@ -39,9 +38,8 @@ object CheckRealizable { extends Realizability(i" has a member $fld which is not a legal path\nsince ${fld.symbol.name}: ${fld.info}${problem.msg}") class ProblemInUnderlying(tp: Type, problem: Realizability)(using Context) - extends Realizability(i"s underlying type ${tp}${problem.msg}") { + extends Realizability(i"s underlying type ${tp}${problem.msg}"): assert(problem != Realizable) - } def realizability(tp: Type)(using Context): Realizability = new 
CheckRealizable().realizability(tp) @@ -50,7 +48,6 @@ object CheckRealizable { new CheckRealizable().boundsRealizability(tp) private val LateInitializedFlags = Lazy | Erased -} /** Compute realizability status. * @@ -61,7 +58,7 @@ object CheckRealizable { * In general, a realizable type can have multiple inhabitants, hence it need not be stable (in the sense of * Type.isStable). */ -class CheckRealizable(using Context) { +class CheckRealizable(using Context): import CheckRealizable._ /** A set of all fields that have already been checked. Used @@ -75,7 +72,7 @@ class CheckRealizable(using Context) { private def isLateInitialized(sym: Symbol) = sym.isOneOf(LateInitializedFlags, butNot = Module) /** The realizability status of given type `tp`*/ - def realizability(tp: Type): Realizability = tp.dealias match { + def realizability(tp: Type): Realizability = tp.dealias match /* * A `TermRef` for a path `p` is realizable if * - `p`'s type is stable and realizable, or @@ -89,7 +86,7 @@ class CheckRealizable(using Context) { val sym = tp.symbol lazy val tpInfoRealizable = realizability(tp.info) if (sym.is(StableRealizable)) realizability(tp.prefix) - else { + else val r = if (sym.isStableMember && !isLateInitialized(sym)) // it's realizable because we know that a value of type `tp` has been created at run-time @@ -106,35 +103,30 @@ class CheckRealizable(using Context) { realizability(tp.prefix) } mapError { r => // A mutable path is in fact stable and realizable if it has a realizable singleton type. 
- if (tp.info.isStable && tpInfoRealizable == Realizable) { + if (tp.info.isStable && tpInfoRealizable == Realizable) sym.setFlag(StableRealizable) Realizable - } else r } - } case _: SingletonType | NoPrefix => Realizable case tp => - def isConcrete(tp: Type): Boolean = tp.dealias match { + def isConcrete(tp: Type): Boolean = tp.dealias match case tp: TypeRef => tp.symbol.isClass case tp: TypeParamRef => false case tp: TypeProxy => isConcrete(tp.underlying) case tp: AndType => isConcrete(tp.tp1) && isConcrete(tp.tp2) case tp: OrType => isConcrete(tp.tp1) && isConcrete(tp.tp2) case _ => false - } if (!isConcrete(tp)) NotConcrete else boundsRealizability(tp).andAlso(memberRealizability(tp)) - } - private def refinedNames(tp: Type): Set[Name] = tp.dealias match { + private def refinedNames(tp: Type): Set[Name] = tp.dealias match case tp: RefinedType => refinedNames(tp.parent) + tp.refinedName case tp: AndType => refinedNames(tp.tp1) ++ refinedNames(tp.tp2) case tp: OrType => refinedNames(tp.tp1) ++ refinedNames(tp.tp2) case tp: TypeProxy => refinedNames(tp.superType) case _ => Set.empty - } /** `Realizable` if `tp` has good bounds, a `HasProblem...` instance * pointing to a bad bounds member otherwise. "Has good bounds" means: @@ -147,17 +139,16 @@ class CheckRealizable(using Context) { * (depending on the simplification scheme for AndTypes employed, this could * also lead to base types with bad bounds). 
*/ - private def boundsRealizability(tp: Type) = { + private def boundsRealizability(tp: Type) = - val memberProblems = withMode(Mode.CheckBoundsOrSelfType) { + val memberProblems = withMode(Mode.CheckBoundsOrSelfType): for { mbr <- tp.nonClassTypeMembers if !(mbr.info.loBound <:< mbr.info.hiBound) } yield new HasProblemBounds(mbr.name, mbr.info) - } - val refinementProblems = withMode(Mode.CheckBoundsOrSelfType) { + val refinementProblems = withMode(Mode.CheckBoundsOrSelfType): for { name <- refinedNames(tp) if (name.isTypeName) @@ -166,17 +157,14 @@ class CheckRealizable(using Context) { } yield new HasProblemBounds(name, mbr.info) - } - def baseTypeProblems(base: Type) = base match { + def baseTypeProblems(base: Type) = base match case AndType(base1, base2) => new HasProblemBase(base1, base2) :: Nil case base => - base.argInfos.collect { + base.argInfos.collect: case bounds @ TypeBounds(lo, hi) if !(lo <:< hi) => new HasProblemBaseArg(base, bounds) - } - } val baseProblems = tp.baseClasses.map(_.baseTypeOf(tp)).flatMap(baseTypeProblems) @@ -184,25 +172,22 @@ class CheckRealizable(using Context) { refinementProblems.foldLeft( memberProblems.foldLeft( Realizable: Realizability)(_ andAlso _))(_ andAlso _))(_ andAlso _) - } /** `Realizable` if all of `tp`'s non-strict fields have realizable types, * a `HasProblemField` instance pointing to a bad field otherwise. */ - private def memberRealizability(tp: Type) = { + private def memberRealizability(tp: Type) = def checkField(sofar: Realizability, fld: SingleDenotation): Realizability = - sofar andAlso { + sofar andAlso: if (checkedFields.contains(fld.symbol) || fld.symbol.isOneOf(Private | Mutable | LateInitializedFlags)) // if field is private it cannot be part of a visible path // if field is mutable it cannot be part of a path // if field is lazy or erased it does not need to be initialized when the owning object is // so in all cases the field does not influence realizability of the enclosing object. 
Realizable - else { + else checkedFields += fld.symbol realizability(fld.info).mapError(r => new HasProblemField(fld, r)) - } - } if sourceVersion.isAtLeast(future) then // check fields only from version 3.x. // Reason: An embedded field could well be nullable, which means it @@ -211,5 +196,3 @@ class CheckRealizable(using Context) { tp.fields.foldLeft(Realizable: Realizability)(checkField) else Realizable - } -} diff --git a/compiler/src/dotty/tools/dotc/core/Comments.scala b/compiler/src/dotty/tools/dotc/core/Comments.scala index 1b20b75ad8ac..c94342e89ebe 100644 --- a/compiler/src/dotty/tools/dotc/core/Comments.scala +++ b/compiler/src/dotty/tools/dotc/core/Comments.scala @@ -13,7 +13,7 @@ import util.Property.Key import parsing.Parsers.Parser import reporting.ProperDefinitionNotFound -object Comments { +object Comments: val ContextDoc: Key[ContextDocstrings] = new Key[ContextDocstrings] /** Decorator for getting docbase out of context */ @@ -23,7 +23,7 @@ object Comments { /** Context for Docstrings, contains basic functionality for getting * docstrings via `Symbol` and expanding templates */ - class ContextDocstrings { + class ContextDocstrings: private val _docstrings: MutableSymbolMap[Comment] = MutableSymbolMap[Comment](512) // FIXME: 2nd [Comment] needed or "not a class type" @@ -35,7 +35,6 @@ object Comments { def addDocstring(sym: Symbol, doc: Option[Comment]): Unit = doc.foreach(d => _docstrings.update(sym, d)) - } /** * A `Comment` contains the unformatted docstring, it's position and potentially more @@ -52,7 +51,7 @@ object Comments { expanded: Option[String], usecases: List[UseCase], variables: Map[String, String], - ) { + ): /** Has this comment been cooked or expanded? */ def isExpanded: Boolean = expanded.isDefined @@ -70,14 +69,12 @@ object Comments { * @param f The expansion function. * @return The expanded comment, with the `usecases` populated. 
*/ - def expand(f: String => String)(using Context): Comment = { + def expand(f: String => String)(using Context): Comment = val expandedComment = f(raw) val useCases = Comment.parseUsecases(expandedComment, span) Comment(span, raw, Some(expandedComment), useCases, Map.empty) - } - } - object Comment { + object Comment: def isDocComment(comment: String): Boolean = comment.startsWith("/**") @@ -100,14 +97,13 @@ object Comments { * def foo: A = ??? * }}} */ - private def decomposeUseCase(body: String, span: Span, start: Int, end: Int)(using Context): UseCase = { + private def decomposeUseCase(body: String, span: Span, start: Int, end: Int)(using Context): UseCase = def subPos(start: Int, end: Int) = if (span == NoSpan) NoSpan - else { + else val start1 = span.start + start val end1 = span.end + end span withStart start1 withPoint start1 withEnd end1 - } val codeStart = skipWhitespace(body, start + "@usecase".length) val codeEnd = skipToEol(body, codeStart) @@ -115,44 +111,36 @@ object Comments { val codePos = subPos(codeStart, codeEnd) UseCase(code, codePos) - } - } - final case class UseCase(code: String, codePos: Span, untpdCode: untpd.Tree, tpdCode: Option[tpd.DefDef]) { + final case class UseCase(code: String, codePos: Span, untpdCode: untpd.Tree, tpdCode: Option[tpd.DefDef]): def typed(tpdCode: tpd.DefDef): UseCase = copy(tpdCode = Some(tpdCode)) - } - object UseCase { - def apply(code: String, codePos: Span)(using Context): UseCase = { - val tree = { + object UseCase: + def apply(code: String, codePos: Span)(using Context): UseCase = + val tree = val tree = new Parser(SourceFile.virtual("", code)).localDef(codePos.start) - tree match { + tree match case tree: untpd.DefDef => val newName = ctx.compilationUnit.freshNames.newName(tree.name, NameKinds.DocArtifactName) untpd.cpy.DefDef(tree)(name = newName) case _ => report.error(ProperDefinitionNotFound(), ctx.source.atSpan(codePos)) tree - } - } UseCase(code, codePos, tree, None) - } - } /** * Port of 
DocComment.scala from nsc * @author Martin Odersky * @author Felix Mulder */ - class CommentExpander { + class CommentExpander: import dotc.config.Printers.scaladoc import scala.collection.mutable - def expand(sym: Symbol, site: Symbol)(using Context): String = { + def expand(sym: Symbol, site: Symbol)(using Context): String = val parent = if (site != NoSymbol) site else sym defineVariables(parent) expandedDocComment(sym, parent) - } /** The cooked doc comment of symbol `sym` after variable expansion, or "" if missing. * @@ -162,24 +150,22 @@ object Comments { * of the same string are done, which is * interpreted as a recursive variable definition. */ - def expandedDocComment(sym: Symbol, site: Symbol, docStr: String = "")(using Context): String = { + def expandedDocComment(sym: Symbol, site: Symbol, docStr: String = "")(using Context): String = // when parsing a top level class or module, use the (module-)class itself to look up variable definitions val parent = if ((sym.is(Flags.Module) || sym.isClass) && site.is(Flags.Package)) sym else site expandVariables(cookedDocComment(sym, docStr), sym, parent) - } private def template(raw: String): String = removeSections(raw, "@define") - private def defines(raw: String): List[String] = { + private def defines(raw: String): List[String] = val sections = tagIndex(raw) val defines = sections filter { startsWithTag(raw, _, "@define") } val usecases = sections filter { startsWithTag(raw, _, "@usecase") } val end = startTag(raw, (defines ::: usecases).sortBy(_._1)) defines map { case (start, end) => raw.substring(start, end) } - } private def replaceInheritDocToInheritdoc(docStr: String): String = docStr.replaceAll("""\{@inheritDoc\p{Zs}*\}""", "@inheritdoc") @@ -201,7 +187,7 @@ object Comments { else template(docStr) ownComment = replaceInheritDocToInheritdoc(ownComment) - superComment(sym) match { + superComment(sym) match case None => // SI-8210 - The warning would be false negative when this symbol is a setter if 
(ownComment.indexOf("@inheritdoc") != -1 && ! sym.isSetter) @@ -210,7 +196,6 @@ object Comments { case Some(sc) => if (ownComment == "") sc else expandInheritdoc(sc, merge(sc, ownComment, sym), sym) - } }) private def isMovable(str: String, sec: (Int, Int)): Boolean = @@ -218,7 +203,7 @@ object Comments { startsWithTag(str, sec, "@tparam") || startsWithTag(str, sec, "@return") - def merge(src: String, dst: String, sym: Symbol, copyFirstPara: Boolean = false): String = { + def merge(src: String, dst: String, sym: Symbol, copyFirstPara: Boolean = false): String = val srcSections = tagIndex(src) val dstSections = tagIndex(dst) val srcParams = paramDocs(src, "@param", srcSections) @@ -229,27 +214,24 @@ object Comments { var copied = 0 var tocopy = startTag(dst, dstSections dropWhile (!isMovable(dst, _))) - if (copyFirstPara) { + if (copyFirstPara) val eop = // end of comment body (first para), which is delimited by blank line, or tag, or end of comment (findNext(src, 0)(src.charAt(_) == '\n')) min startTag(src, srcSections) out append src.substring(0, eop).trim copied = 3 tocopy = 3 - } - def mergeSection(srcSec: Option[(Int, Int)], dstSec: Option[(Int, Int)]) = dstSec match { + def mergeSection(srcSec: Option[(Int, Int)], dstSec: Option[(Int, Int)]) = dstSec match case Some((start, end)) => if (end > tocopy) tocopy = end case None => - srcSec match { + srcSec match case Some((start1, end1)) => out append dst.substring(copied, tocopy).trim out append "\n" copied = tocopy out append src.substring(start1, end1).trim case None => - } - } //TODO: enable this once you know how to get `sym.paramss` /* @@ -263,11 +245,9 @@ object Comments { */ if (out.length == 0) dst - else { + else out append dst.substring(copied) out.toString - } - } /** * Expand inheritdoc tags @@ -288,7 +268,7 @@ object Comments { def expandInheritdoc(parent: String, child: String, sym: Symbol): String = if (child.indexOf("@inheritdoc") == -1) child - else { + else val parentSections = tagIndex(parent) 
val childSections = tagIndex(child) val parentTagMap = sectionTagMap(parent, parentSections) @@ -305,15 +285,14 @@ object Comments { else childSection.replace("@inheritdoc", parentSection) - def getParentSection(section: (Int, Int)): String = { + def getParentSection(section: (Int, Int)): String = - def getSectionHeader = extractSectionTag(child, section) match { + def getSectionHeader = extractSectionTag(child, section) match case param@("@param"|"@tparam"|"@throws") => param + " " + extractSectionParam(child, section) case other => other - } def sectionString(param: String, paramMap: Map[String, (Int, Int)]): String = - paramMap.get(param) match { + paramMap.get(param) match case Some(section) => // Cleanup the section tag and parameter val sectionTextBounds = extractSectionText(parent, section) @@ -322,15 +301,12 @@ object Comments { scaladoc.println(s"""${sym.span}: the """" + getSectionHeader + "\" annotation of the " + sym + " comment contains @inheritdoc, but the corresponding section in the parent is not defined.") "" - } - child.substring(section._1, section._1 + 7) match { + child.substring(section._1, section._1 + 7) match case param@("@param "|"@tparam"|"@throws") => sectionString(extractSectionParam(child, section), parentNamedParams(param.trim)) case _ => sectionString(extractSectionTag(child, section), parentTagMap) - } - } def mainComment(str: String, sections: List[(Int, Int)]): String = if (str.trim.length > 3) @@ -348,12 +324,11 @@ object Comments { out.append("*/") out.toString - } - protected def expandVariables(initialStr: String, sym: Symbol, site: Symbol)(using Context): String = { + protected def expandVariables(initialStr: String, sym: Symbol, site: Symbol)(using Context): String = val expandLimit = 10 - def expandInternal(str: String, depth: Int): String = { + def expandInternal(str: String, depth: Int): String = if (depth >= expandLimit) throw new ExpansionLimitExceeded(str) @@ -365,15 +340,14 @@ object Comments { while (idx < 
str.length) if ((str charAt idx) != '$' || isEscaped) idx += 1 - else { + else val vstart = idx idx = skipVariable(str, idx + 1) - def replaceWith(repl: String) = { + def replaceWith(repl: String) = out append str.substring(copied, vstart) out append repl copied = idx - } - variableName(str.substring(vstart + 1, idx)) match { + variableName(str.substring(vstart + 1, idx)) match case "super" => superComment(sym) foreach { sc => val superSections = tagIndex(sc) @@ -383,26 +357,20 @@ object Comments { } case "" => idx += 1 case vname => - lookupVariable(vname, site) match { + lookupVariable(vname, site) match case Some(replacement) => replaceWith(replacement) case None => scaladoc.println(s"Variable $vname undefined in comment for $sym in $site") - } - } - } if (out.length == 0) str - else { + else out append str.substring(copied) expandInternal(out.toString, depth + 1) - } - } // We suppressed expanding \$ throughout the recursion, and now we // need to replace \$ with $ so it looks as intended. 
expandInternal(initialStr, 0).replace("""\$""", "$") - } - def defineVariables(sym: Symbol)(using Context): Unit = { + def defineVariables(sym: Symbol)(using Context): Unit = val Trim = "(?s)^[\\s&&[^\n\r]]*(.*?)\\s*$".r val raw = ctx.docCtx.flatMap(_.docstring(sym).map(_.raw)).getOrElse("") @@ -412,11 +380,9 @@ object Comments { val (key, value) = str.splitAt(skipVariable(str, start)) key.drop(start) -> value } - } map { + } map: case (key, Trim(value)) => variableName(key) -> value.replaceAll("\\s+\\*+$", "") - } - } /** Maps symbols to the variable -> replacement maps that are defined * in their doc comments @@ -428,18 +394,16 @@ object Comments { * @param vble The variable for which a definition is searched * @param site The class for which doc comments are generated */ - def lookupVariable(vble: String, site: Symbol)(using Context): Option[String] = site match { + def lookupVariable(vble: String, site: Symbol)(using Context): Option[String] = site match case NoSymbol => None case _ => val searchList = if (site.flags.is(Flags.Module)) site :: site.info.baseClasses else site.info.baseClasses - searchList collectFirst { case x if defs(x) contains vble => defs(x)(vble) } match { + searchList collectFirst { case x if defs(x) contains vble => defs(x)(vble) } match case Some(str) if str startsWith "$" => lookupVariable(str.tail, site) case res => res orElse lookupVariable(vble, site.owner) - } - } /** The position of the raw doc comment of symbol `sym`, or NoPosition if missing * If a symbol does not have a doc comment but some overridden version of it does, @@ -458,5 +422,3 @@ object Comments { //else sym.owner.ancestors map (sym overriddenSymbol _) filter (_ != NoSymbol) class ExpansionLimitExceeded(str: String) extends Exception - } -} diff --git a/compiler/src/dotty/tools/dotc/core/Constants.scala b/compiler/src/dotty/tools/dotc/core/Constants.scala index f45e9e5217de..cf70051ad543 100644 --- a/compiler/src/dotty/tools/dotc/core/Constants.scala +++ 
b/compiler/src/dotty/tools/dotc/core/Constants.scala @@ -6,7 +6,7 @@ import Types._, Symbols._, Contexts._ import printing.Printer import printing.Texts.Text -object Constants { +object Constants: inline val NoTag = 0 inline val UnitTag = 1 @@ -22,7 +22,7 @@ object Constants { inline val NullTag = 11 inline val ClazzTag = 12 - class Constant(val value: Any, val tag: Int) extends printing.Showable with Product1[Any] { + class Constant(val value: Any, val tag: Int) extends printing.Showable with Product1[Any]: import java.lang.Double.doubleToRawLongBits import java.lang.Float.floatToRawIntBits @@ -36,7 +36,7 @@ object Constants { def isNonUnitAnyVal: Boolean = BooleanTag <= tag && tag <= DoubleTag def isAnyVal: Boolean = UnitTag <= tag && tag <= DoubleTag - def tpe(using Context): Type = tag match { + def tpe(using Context): Type = tag match case UnitTag => defn.UnitType case BooleanTag => defn.BooleanType case ByteTag => defn.ByteType @@ -49,27 +49,24 @@ object Constants { case StringTag => defn.StringType case NullTag => defn.NullType case ClazzTag => defn.ClassType(typeValue) - } /** We need the equals method to take account of tags as well as values. 
*/ - override def equals(other: Any): Boolean = other match { + override def equals(other: Any): Boolean = other match case that: Constant => this.tag == that.tag && equalHashValue == that.equalHashValue case _ => false - } - def isNaN: Boolean = value match { + def isNaN: Boolean = value match case f: Float => f.isNaN case d: Double => d.isNaN case _ => false - } def booleanValue: Boolean = if (tag == BooleanTag) value.asInstanceOf[Boolean] else throw new Error("value " + value + " is not a boolean") - def byteValue: Byte = tag match { + def byteValue: Byte = tag match case ByteTag => value.asInstanceOf[Byte] case ShortTag => value.asInstanceOf[Short].toByte case CharTag => value.asInstanceOf[Char].toByte @@ -78,9 +75,8 @@ object Constants { case FloatTag => value.asInstanceOf[Float].toByte case DoubleTag => value.asInstanceOf[Double].toByte case _ => throw new Error("value " + value + " is not a Byte") - } - def shortValue: Short = tag match { + def shortValue: Short = tag match case ByteTag => value.asInstanceOf[Byte].toShort case ShortTag => value.asInstanceOf[Short] case CharTag => value.asInstanceOf[Char].toShort @@ -89,9 +85,8 @@ object Constants { case FloatTag => value.asInstanceOf[Float].toShort case DoubleTag => value.asInstanceOf[Double].toShort case _ => throw new Error("value " + value + " is not a Short") - } - def charValue: Char = tag match { + def charValue: Char = tag match case ByteTag => value.asInstanceOf[Byte].toChar case ShortTag => value.asInstanceOf[Short].toChar case CharTag => value.asInstanceOf[Char] @@ -100,9 +95,8 @@ object Constants { case FloatTag => value.asInstanceOf[Float].toChar case DoubleTag => value.asInstanceOf[Double].toChar case _ => throw new Error("value " + value + " is not a Char") - } - def intValue: Int = tag match { + def intValue: Int = tag match case ByteTag => value.asInstanceOf[Byte].toInt case ShortTag => value.asInstanceOf[Short].toInt case CharTag => value.asInstanceOf[Char].toInt @@ -111,9 +105,8 @@ object 
Constants { case FloatTag => value.asInstanceOf[Float].toInt case DoubleTag => value.asInstanceOf[Double].toInt case _ => throw new Error("value " + value + " is not an Int") - } - def longValue: Long = tag match { + def longValue: Long = tag match case ByteTag => value.asInstanceOf[Byte].toLong case ShortTag => value.asInstanceOf[Short].toLong case CharTag => value.asInstanceOf[Char].toLong @@ -122,9 +115,8 @@ object Constants { case FloatTag => value.asInstanceOf[Float].toLong case DoubleTag => value.asInstanceOf[Double].toLong case _ => throw new Error("value " + value + " is not a Long") - } - def floatValue: Float = tag match { + def floatValue: Float = tag match case ByteTag => value.asInstanceOf[Byte].toFloat case ShortTag => value.asInstanceOf[Short].toFloat case CharTag => value.asInstanceOf[Char].toFloat @@ -133,9 +125,8 @@ object Constants { case FloatTag => value.asInstanceOf[Float] case DoubleTag => value.asInstanceOf[Double].toFloat case _ => throw new Error("value " + value + " is not a Float") - } - def doubleValue: Double = tag match { + def doubleValue: Double = tag match case ByteTag => value.asInstanceOf[Byte].toDouble case ShortTag => value.asInstanceOf[Short].toDouble case CharTag => value.asInstanceOf[Char].toDouble @@ -144,24 +135,21 @@ object Constants { case FloatTag => value.asInstanceOf[Float].toDouble case DoubleTag => value.asInstanceOf[Double] case _ => throw new Error("value " + value + " is not a Double") - } /** Convert constant value to conform to given type. 
*/ - def convertTo(pt: Type)(using Context): Constant | Null = { - def classBound(pt: Type): Type = pt.dealias.stripTypeVar match { + def convertTo(pt: Type)(using Context): Constant | Null = + def classBound(pt: Type): Type = pt.dealias.stripTypeVar match case tref: TypeRef if !tref.symbol.isClass && tref.info.exists => classBound(tref.info.bounds.lo) case param: TypeParamRef => - ctx.typerState.constraint.entry(param) match { + ctx.typerState.constraint.entry(param) match case TypeBounds(lo, hi) => if (hi.classSymbol.isPrimitiveValueClass) hi //constrain further with high bound else classBound(lo) case NoType => classBound(param.binder.paramInfos(param.paramNum).lo) case inst => classBound(inst) - } case pt => pt - } pt match case ConstantType(value) if value == this => this case _: SingletonType => null @@ -185,7 +173,6 @@ object Constants { Constant(doubleValue) else null - } def stringValue: String = value.toString @@ -203,29 +190,26 @@ object Constants { * constants in regular Scala code, but it is conceivable that you could * conjure them with a macro. */ - private def equalHashValue: Any = value match { + private def equalHashValue: Any = value match case f: Float => floatToRawIntBits(f) case d: Double => doubleToRawLongBits(d) case v => v - } - override def hashCode: Int = { + override def hashCode: Int = import scala.util.hashing.MurmurHash3._ val seed = 17 var h = seed h = mix(h, tag.##) // include tag in the hash, otherwise 0, 0d, 0L, 0f collide. 
h = mix(h, equalHashValue.##) finalizeHash(h, length = 2) - } override def toString: String = s"Constant($value)" def canEqual(x: Any): Boolean = true def get: Any = value def isEmpty: Boolean = false def _1: Any = value - } - object Constant { + object Constant: def apply(x: Null): Constant = new Constant(x, NullTag) def apply(x: Unit): Constant = new Constant(x, UnitTag) def apply(x: Boolean): Constant = new Constant(x, BooleanTag) @@ -257,5 +241,3 @@ object Constants { ) def unapply(c: Constant): Constant = c - } -} diff --git a/compiler/src/dotty/tools/dotc/core/Constraint.scala b/compiler/src/dotty/tools/dotc/core/Constraint.scala index c634f847e510..c8c589fce4f7 100644 --- a/compiler/src/dotty/tools/dotc/core/Constraint.scala +++ b/compiler/src/dotty/tools/dotc/core/Constraint.scala @@ -14,7 +14,7 @@ import util.{SimpleIdentitySet, SimpleIdentityMap} * - TypeVar Every constrained parameter might be associated with a TypeVar * that has the TypeParamRef as origin. */ -abstract class Constraint extends Showable { +abstract class Constraint extends Showable: type This <: Constraint @@ -221,7 +221,6 @@ abstract class Constraint extends Showable { * of athe type lambda that is associated with the typevar itself. */ def checkConsistentVars()(using Context): Unit -} /** When calling `Constraint#addLess(p1, p2, ...)`, the caller might end up * unifying one parameter with the other, this enum lets `addLess` know which diff --git a/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala b/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala index 9ffe2bda73cb..2e7810274ee2 100644 --- a/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala +++ b/compiler/src/dotty/tools/dotc/core/ConstraintHandling.scala @@ -26,7 +26,7 @@ import NullOpsDecorator.stripNull * By comparison: Constraint handlers are parts of type comparers and can use their functionality. * Constraint handlers update the current constraint as a side effect. 
*/ -trait ConstraintHandling { +trait ConstraintHandling: def constr: config.Printers.Printer = config.Printers.constr @@ -214,7 +214,7 @@ trait ConstraintHandling { params.find(p => nestingLevel(p) <= maxLevel && representedParamRef(p) == oldParam && (p.paramName.is(AvoidNameKind.BothBounds) || - variance != 0 && p.paramName.is(nameKind))) + variance != 0 && p.paramName.is(nameKind))) // First, check if we can reuse an existing parameter, this is more than an optimization // since it avoids an infinite loop in tests/pos/i8900-cycle.scala @@ -371,11 +371,11 @@ trait ConstraintHandling { .showing(i"added $description = $result$location", constr) end addBoundTransitively - protected def addLess(p1: TypeParamRef, p2: TypeParamRef)(using Context): Boolean = { + protected def addLess(p1: TypeParamRef, p2: TypeParamRef)(using Context): Boolean = def description = i"ordering $p1 <: $p2 to\n$constraint" val res = if (constraint.isLess(p2, p1)) unify(p2, p1) - else { + else val down1 = p1 :: constraint.exclusiveLower(p1, p2) val up2 = p2 :: constraint.exclusiveUpper(p2, p1) val lo1 = constraint.nonParamBounds(p1).lo @@ -384,10 +384,8 @@ trait ConstraintHandling { constraint = constraint.addLess(p1, p2) down1.forall(addOneBound(_, hi2, isUpper = true)) && up2.forall(addOneBound(_, lo1, isUpper = false)) - } constr.println(i"added $description = $res$location") res - } def location(using Context) = "" // i"in ${ctx.typerState.stateChainStr}" // use for debugging @@ -398,7 +396,7 @@ trait ConstraintHandling { * level will be kept and the transferred bounds from the other parameter * will be adjusted for level-correctness. 
*/ - private def unify(p1: TypeParamRef, p2: TypeParamRef)(using Context): Boolean = { + private def unify(p1: TypeParamRef, p2: TypeParamRef)(using Context): Boolean = constr.println(s"unifying $p1 $p2") if !constraint.isLess(p1, p2) then constraint = constraint.addLess(p1, p2) @@ -440,7 +438,6 @@ trait ConstraintHandling { isSub(lo, hi) && down.forall(addOneBound(_, hi, isUpper = true)) && up.forall(addOneBound(_, lo, isUpper = false)) - } protected def isSubType(tp1: Type, tp2: Type, whenFrozen: Boolean)(using Context): Boolean = if (whenFrozen) @@ -448,17 +445,15 @@ trait ConstraintHandling { else isSub(tp1, tp2) - inline final def inFrozenConstraint[T](op: => T): T = { + inline final def inFrozenConstraint[T](op: => T): T = val savedFrozen = frozenConstraint val savedLambda = caseLambda frozenConstraint = true caseLambda = NoType try op - finally { + finally frozenConstraint = savedFrozen caseLambda = savedLambda - } - } final def isSubTypeWhenFrozen(tp1: Type, tp2: Type)(using Context): Boolean = inFrozenConstraint(isSub(tp1, tp2)) final def isSameTypeWhenFrozen(tp1: Type, tp2: Type)(using Context): Boolean = inFrozenConstraint(isSame(tp1, tp2)) @@ -469,10 +464,9 @@ trait ConstraintHandling { protected final def isSatisfiable(using Context): Boolean = constraint.forallParams { param => val TypeBounds(lo, hi) = constraint.entry(param): @unchecked - isSub(lo, hi) || { + isSub(lo, hi) `||`: report.log(i"sub fail $lo <:< $hi") false - } } /** Fix instance type `tp` by avoidance so that it does not contain references @@ -701,7 +695,7 @@ trait ConstraintHandling { * The instance type is not allowed to contain references to types nested deeper * than `maxLevel`. 
*/ - def instanceType(param: TypeParamRef, fromBelow: Boolean, widenUnions: Boolean, maxLevel: Int)(using Context): Type = { + def instanceType(param: TypeParamRef, fromBelow: Boolean, widenUnions: Boolean, maxLevel: Int)(using Context): Type = val approx = approximation(param, fromBelow, maxLevel).simplified if fromBelow then val widened = widenInferred(approx, param, widenUnions) @@ -717,7 +711,6 @@ trait ConstraintHandling { widened else approx - } /** Constraint `c1` subsumes constraint `c2`, if under `c2` as constraint we have * for all poly params `p` defined in `c2` as `p >: L2 <: U2`: @@ -732,7 +725,7 @@ trait ConstraintHandling { protected final def subsumes(c1: Constraint, c2: Constraint, pre: Constraint)(using Context): Boolean = if (c2 eq pre) true else if (c1 eq pre) false - else { + else val saved = constraint try // We iterate over params of `pre`, instead of `c2` as the documentation may suggest. @@ -746,7 +739,6 @@ trait ConstraintHandling { && isSubTypeWhenFrozen(c1.nonParamBounds(p), c2.nonParamBounds(p)) ) finally constraint = saved - } /** The current bounds of type parameter `param` */ def bounds(param: TypeParamRef)(using Context): TypeBounds = constraint.bounds(param) @@ -756,12 +748,12 @@ trait ConstraintHandling { * @param tvars See Constraint#add */ def addToConstraint(tl: TypeLambda, tvars: List[TypeVar])(using Context): Boolean = - checkPropagated(i"initialized $tl") { + checkPropagated(i"initialized $tl"): constraint = constraint.add(tl, tvars) tl.paramRefs.forall { param => val lower = constraint.lower(param) val upper = constraint.upper(param) - constraint.entry(param) match { + constraint.entry(param) match case bounds: TypeBounds => if lower.nonEmpty && !bounds.lo.isRef(defn.NothingClass) || upper.nonEmpty && !bounds.hi.isAny @@ -772,9 +764,7 @@ trait ConstraintHandling { // Happens if param was already solved while processing earlier params of the same TypeLambda. // See #4720. 
true - } } - } /** Can `param` be constrained with new bounds? */ final def canConstrain(param: TypeParamRef): Boolean = @@ -811,9 +801,9 @@ trait ConstraintHandling { */ def avoidLambdaParams(tp: Type) = if comparedTypeLambdas.nonEmpty then - val approx = new ApproximatingTypeMap { + val approx = new ApproximatingTypeMap: if (!fromBelow) variance = -1 - def apply(t: Type): Type = t match { + def apply(t: Type): Type = t match case t @ TypeParamRef(tl: TypeLambda, n) if comparedTypeLambdas contains tl => val bounds = tl.paramInfos(n) range(bounds.lo, bounds.hi) @@ -824,18 +814,15 @@ trait ConstraintHandling { finally comparedTypeLambdas = saved case _ => mapOver(t) - } - } approx(tp) else tp def addParamBound(bound: TypeParamRef) = - constraint.entry(param) match { + constraint.entry(param) match case _: TypeBounds => if (fromBelow) addLess(bound, param) else addLess(param, bound) case tp => if (fromBelow) isSub(bound, tp) else isSub(tp, bound) - } def kindCompatible(tp1: Type, tp2: Type): Boolean = val tparams1 = tp1.typeParams @@ -848,7 +835,7 @@ trait ConstraintHandling { def description = i"constr $param ${if (fromBelow) ">:" else "<:"} $bound:\n$constraint" //checkPropagated(s"adding $description")(true) // DEBUG in case following fails - checkPropagated(s"added $description") { + checkPropagated(s"added $description"): addConstraintInvocations += 1 val saved = canWidenAbstract canWidenAbstract = true @@ -861,24 +848,18 @@ trait ConstraintHandling { finally canWidenAbstract = saved addConstraintInvocations -= 1 - } end addConstraint /** Check that constraint is fully propagated. 
See comment in Config.checkConstraintsPropagated */ - def checkPropagated(msg: => String)(result: Boolean)(using Context): Boolean = { + def checkPropagated(msg: => String)(result: Boolean)(using Context): Boolean = if (Config.checkConstraintsPropagated && result && addConstraintInvocations == 0) - inFrozenConstraint { - for (p <- constraint.domainParams) { + inFrozenConstraint: + for (p <- constraint.domainParams) def check(cond: => Boolean, q: TypeParamRef, ordering: String, explanation: String): Unit = assert(cond, i"propagation failure for $p $ordering $q: $explanation\n$msg") for (u <- constraint.upper(p)) check(bounds(p).hi <:< bounds(u).hi, u, "<:", "upper bound not propagated") - for (l <- constraint.lower(p)) { + for (l <- constraint.lower(p)) check(bounds(l).lo <:< bounds(p).hi, l, ">:", "lower bound not propagated") check(constraint.isLess(l, p), l, ">:", "reverse ordering (<:) missing") - } - } - } result - } -} diff --git a/compiler/src/dotty/tools/dotc/core/ConstraintRunInfo.scala b/compiler/src/dotty/tools/dotc/core/ConstraintRunInfo.scala index d2b1246a8149..2f92981f505a 100644 --- a/compiler/src/dotty/tools/dotc/core/ConstraintRunInfo.scala +++ b/compiler/src/dotty/tools/dotc/core/ConstraintRunInfo.scala @@ -8,10 +8,9 @@ trait ConstraintRunInfo { self: Run => private var maxSize = 0 private var maxConstraint: Constraint | Null = _ def recordConstraintSize(c: Constraint, size: Int): Unit = - if (size > maxSize) { + if (size > maxSize) maxSize = size maxConstraint = c - } def printMaxConstraint()(using Context): Unit = if maxSize > 0 then val printer = if ctx.settings.YdetailedStats.value then default else typr diff --git a/compiler/src/dotty/tools/dotc/core/ContextOps.scala b/compiler/src/dotty/tools/dotc/core/ContextOps.scala index aa85f714a8e5..a77ed74081ce 100644 --- a/compiler/src/dotty/tools/dotc/core/ContextOps.scala +++ b/compiler/src/dotty/tools/dotc/core/ContextOps.scala @@ -16,36 +16,32 @@ object ContextOps: * in order to make sure that 
updates to class members are reflected in * finger prints. */ - def enter(sym: Symbol): Symbol = inContext(ctx) { + def enter(sym: Symbol): Symbol = inContext(ctx): ctx.owner match case cls: ClassSymbol => cls.classDenot.enter(sym) case _ => ctx.scope.openForMutations.enter(sym) sym - } /** The denotation with the given `name` and all `required` flags in current context */ def denotNamed(name: Name, required: FlagSet = EmptyFlags, excluded: FlagSet = EmptyFlags): Denotation = - inContext(ctx) { + inContext(ctx): if (ctx.owner.isClass) - if (ctx.outer.owner == ctx.owner) { // inner class scope; check whether we are referring to self - if (ctx.scope.size == 1) { + if (ctx.outer.owner == ctx.owner) // inner class scope; check whether we are referring to self + if (ctx.scope.size == 1) val elem = ctx.scope.lastEntry.nn if (elem.name == name) return elem.sym.denot // return self - } val pre = ctx.owner.thisType if ctx.isJava then javaFindMember(name, pre, required, excluded) else pre.findMember(name, pre, required, excluded) - } else // we are in the outermost context belonging to a class; self is invisible here. See inClassContext. ctx.owner.findMember(name, ctx.owner.thisType, required, excluded) else ctx.scope.denotsNamed(name).filterWithFlags(required, excluded).toDenot(NoPrefix) - } final def javaFindMember(name: Name, pre: Type, required: FlagSet = EmptyFlags, excluded: FlagSet = EmptyFlags): Denotation = assert(ctx.isJava) - inContext(ctx) { + inContext(ctx): val preSym = pre.typeSymbol @@ -55,7 +51,7 @@ object ContextOps: // 2. Try to search in companion class if current is an object. def searchCompanionClass = if preSym.is(Flags.Module) then preSym.companionClass.thisType.findMember(name, pre, required, excluded) - else NoDenotation + else NoDenotation // 3. Try to search in companion objects of super classes. 
// In Java code, static inner classes, which we model as members of the companion object, @@ -77,37 +73,31 @@ object ContextOps: directSearch orElse searchCompanionClass orElse searchSuperCompanionObjects else directSearch - } /** A fresh local context with given tree and owner. * Owner might not exist (can happen for self valdefs), in which case * no owner is set in result context */ - def localContext(tree: untpd.Tree, owner: Symbol): FreshContext = inContext(ctx) { + def localContext(tree: untpd.Tree, owner: Symbol): FreshContext = inContext(ctx): val freshCtx = ctx.fresh.setTree(tree) if owner.exists then freshCtx.setOwner(owner) else freshCtx - } /** Context where `sym` is defined, assuming we are in a nested context. */ - def defContext(sym: Symbol): Context = inContext(ctx) { + def defContext(sym: Symbol): Context = inContext(ctx): ctx.outersIterator .dropWhile(_.owner != sym) .dropWhile(_.owner == sym) .next() - } /** A new context for the interior of a class */ - def inClassContext(selfInfo: TypeOrSymbol): Context = inContext(ctx) { + def inClassContext(selfInfo: TypeOrSymbol): Context = inContext(ctx): val localCtx: Context = ctx.fresh.setNewScope - selfInfo match { + selfInfo match case sym: Symbol if sym.exists && sym.name != nme.WILDCARD => localCtx.scope.openForMutations.enter(sym) case _ => - } localCtx - } - def packageContext(tree: untpd.PackageDef, pkg: Symbol): Context = inContext(ctx) { + def packageContext(tree: untpd.PackageDef, pkg: Symbol): Context = inContext(ctx): if (pkg.is(Package)) ctx.fresh.setOwner(pkg.moduleClass).setTree(tree) else ctx - } end ContextOps diff --git a/compiler/src/dotty/tools/dotc/core/Contexts.scala b/compiler/src/dotty/tools/dotc/core/Contexts.scala index e0e43169820a..1d055764135d 100644 --- a/compiler/src/dotty/tools/dotc/core/Contexts.scala +++ b/compiler/src/dotty/tools/dotc/core/Contexts.scala @@ -41,7 +41,7 @@ import plugins._ import java.util.concurrent.atomic.AtomicInteger import 
java.nio.file.InvalidPathException -object Contexts { +object Contexts: private val (compilerCallbackLoc, store1) = Store.empty.newLocation[CompilerCallback]() private val (sbtCallbackLoc, store2) = store1.newLocation[AnalysisCallback]() @@ -129,11 +129,10 @@ object Contexts { def outer: Context /** All outer contexts, ending in `base.initialCtx` and then `NoContext` */ - def outersIterator: Iterator[Context] = new Iterator[Context] { + def outersIterator: Iterator[Context] = new Iterator[Context]: var current = thiscontext def hasNext = current != NoContext def next = { val c = current; current = current.outer; c } - } def period: Period def mode: Mode @@ -198,15 +197,14 @@ object Contexts { /** The new implicit references that are introduced by this scope */ private var implicitsCache: ContextualImplicits | Null = null - def implicits: ContextualImplicits = { + def implicits: ContextualImplicits = if (implicitsCache == null) - implicitsCache = { + implicitsCache = val implicitRefs: List[ImplicitRef] = if (isClassDefContext) try owner.thisType.implicitMembers - catch { + catch case ex: CyclicReference => Nil - } else if (isImportContext) importInfo.nn.importedImplicits else if (isNonEmptyScopeContext) scope.implicitDecls else Nil @@ -217,9 +215,7 @@ object Contexts { outer.implicits if (implicitRefs.isEmpty) outerImplicits else new ContextualImplicits(implicitRefs, outerImplicits, isImportContext)(this) - } implicitsCache.nn - } /** Either the current scope, or, if the current context owner is a class, * the declarations of the current class. 
@@ -232,10 +228,9 @@ object Contexts { def nestingLevel: Int = effectiveScope.nestingLevel /** Sourcefile corresponding to given abstract file, memoized */ - def getSource(file: AbstractFile, codec: => Codec = Codec(settings.encoding.value)) = { + def getSource(file: AbstractFile, codec: => Codec = Codec(settings.encoding.value)) = util.Stats.record("Context.getSource") base.sources.getOrElseUpdate(file, SourceFile(file, codec)) - } /** SourceFile with given path name, memoized */ def getSource(path: TermName): SourceFile = getFile(path) match @@ -394,10 +389,9 @@ object Contexts { * context see the constructor parameters instead, but then we'd need a final substitution step * from constructor parameters to class parameter accessors. */ - def superCallContext: Context = { + def superCallContext: Context = val locals = newScopeWith(owner.typeParams ++ owner.asClass.paramAccessors: _*) superOrThisCallContext(owner.primaryConstructor, locals) - } /** The context for the arguments of a this(...) constructor call. * The context is computed from the local auxiliary constructor context. @@ -407,22 +401,20 @@ object Contexts { * - as outer context: The context enclosing the enclosing class context * - as scope: The parameters of the auxiliary constructor. */ - def thisCallArgContext: Context = { + def thisCallArgContext: Context = val constrCtx = outersIterator.dropWhile(_.outer.owner == owner).next() superOrThisCallContext(owner, constrCtx.scope) .setTyperState(typerState) .setGadtState(gadtState) .fresh .setScope(this.scope) - } /** The super- or this-call context with given owner and locals. 
*/ - private def superOrThisCallContext(owner: Symbol, locals: Scope): FreshContext = { + private def superOrThisCallContext(owner: Symbol, locals: Scope): FreshContext = var classCtx = outersIterator.dropWhile(!_.isClassDefContext).next() classCtx.outer.fresh.setOwner(owner) .setScope(locals) .setMode(classCtx.mode) - } /** The context of expression `expr` seen as a member of a statement sequence */ def exprContext(stat: Tree[?], exprOwner: Symbol): Context = @@ -471,15 +463,13 @@ object Contexts { final def withProperty[T](key: Key[T], value: Option[T]): Context = if (property(key) == value) this - else value match { + else value match case Some(v) => fresh.setProperty(key, v) case None => fresh.dropProperty(key) - } - def typer: Typer = this.typeAssigner match { + def typer: Typer = this.typeAssigner match case typer: Typer => typer case _ => new Typer - } override def toString: String = def iinfo(using Context) = @@ -524,7 +514,7 @@ object Contexts { /** A fresh context allows selective modification * of its attributes using the with... methods. 
*/ - class FreshContext(base: ContextBase) extends Context(base) { + class FreshContext(base: ContextBase) extends Context(base): util.Stats.record("Context.fresh") private var _outer: Context = uninitialized @@ -567,7 +557,7 @@ object Contexts { * @param outer The outer context * @param origin The context from which fields are copied */ - private[Contexts] def init(outer: Context, origin: Context): this.type = { + private[Contexts] def init(outer: Context, origin: Context): this.type = _outer = outer _period = origin.period _mode = origin.mode @@ -580,7 +570,6 @@ object Contexts { _moreProperties = origin.moreProperties _store = origin.store this - } def reuseIn(outer: Context): this.type = resetCaches() @@ -658,10 +647,9 @@ object Contexts { this._store = store this - def setCompilationUnit(compilationUnit: CompilationUnit): this.type = { + def setCompilationUnit(compilationUnit: CompilationUnit): this.type = setSource(compilationUnit.source) updateStore(compilationUnitLoc, compilationUnit) - } def setCompilerCallback(callback: CompilerCallback): this.type = updateStore(compilerCallbackLoc, callback) def setSbtCallback(callback: AnalysisCallback): this.type = updateStore(sbtCallbackLoc, callback) @@ -686,17 +674,15 @@ object Contexts { def dropProperty(key: Key[?]): this.type = setMoreProperties(moreProperties - key) - def addLocation[T](initial: T): Store.Location[T] = { + def addLocation[T](initial: T): Store.Location[T] = val (loc, store1) = store.newLocation(initial) setStore(store1) loc - } - def addLocation[T](): Store.Location[T] = { + def addLocation[T](): Store.Location[T] = val (loc, store1) = store.newLocation[T]() setStore(store1) loc - } def updateStore[T](loc: Store.Location[T], value: T): this.type = setStore(store.updated(loc, value)) @@ -708,7 +694,6 @@ object Contexts { setSettings(setting.updateIn(settingsState, value)) def setDebug: this.type = setSetting(base.settings.Ydebug, true) - } object FreshContext: /** Defines an initial context with 
given context base and possible settings. */ @@ -748,18 +733,16 @@ object Contexts { end ops // TODO: Fix issue when converting ModeChanges and FreshModeChanges to extension givens - extension (c: Context) { + extension (c: Context) final def withModeBits(mode: Mode): Context = if (mode != c.mode) c.fresh.setMode(mode) else c final def addMode(mode: Mode): Context = withModeBits(c.mode | mode) final def retractMode(mode: Mode): Context = withModeBits(c.mode &~ mode) - } - extension (c: FreshContext) { + extension (c: FreshContext) final def addMode(mode: Mode): c.type = c.setMode(c.mode | mode) final def retractMode(mode: Mode): c.type = c.setMode(c.mode &~ mode) - } /** Run `op` with a pool-allocated context that has an ExporeTyperState. */ inline def explore[T](inline op: Context ?=> T)(using Context): T = @@ -831,17 +814,16 @@ object Contexts { finally ctx.base.comparersInUse = saved end comparing - @sharable val NoContext: Context = new FreshContext((null: ContextBase | Null).uncheckedNN) { + @sharable val NoContext: Context = new FreshContext((null: ContextBase | Null).uncheckedNN): override val implicits: ContextualImplicits = new ContextualImplicits(Nil, null, false)(this: @unchecked) setSource(NoSource) - } /** A context base defines state and associated methods that exist once per * compiler run. 
*/ class ContextBase extends ContextState with Phases.PhasesBase - with Plugins { + with Plugins: /** The applicable settings */ val settings: ScalaSettings = new ScalaSettings @@ -853,13 +835,12 @@ object Contexts { private var _platform: Platform | Null = uninitialized /** The platform */ - def platform: Platform = { + def platform: Platform = val p = _platform if p == null then throw new IllegalStateException( "initialize() must be called before accessing platform") p - } protected def newPlatform(using Context): Platform = if (settings.scalajs.value) new SJSPlatform @@ -877,14 +858,12 @@ object Contexts { /** Initializes the `ContextBase` with a starting context. * This initializes the `platform` and the `definitions`. */ - def initialize()(using Context): Unit = { + def initialize()(using Context): Unit = _platform = newPlatform definitions.init() - } def fusedContaining(p: Phase): Phase = allPhases.find(_.period.containsPhaseId(p.id)).getOrElse(NoPhase) - } class ContextPool: protected def fresh()(using Context): FreshContext = @@ -928,7 +907,7 @@ object Contexts { super.fresh().setTyperState(ts) /** The essential mutable state of a context base, collected into a common class */ - class ContextState { + class ContextState: // Symbols state /** Counter for unique symbol ids */ @@ -1053,5 +1032,3 @@ object Contexts { def checkSingleThreaded(): Unit = if (thread == null) thread = Thread.currentThread() else assert(thread == Thread.currentThread(), "illegal multithreaded access to ContextBase") - } -} diff --git a/compiler/src/dotty/tools/dotc/core/Decorators.scala b/compiler/src/dotty/tools/dotc/core/Decorators.scala index 4ef0dbc9a43b..598b5ba9a959 100644 --- a/compiler/src/dotty/tools/dotc/core/Decorators.scala +++ b/compiler/src/dotty/tools/dotc/core/Decorators.scala @@ -12,7 +12,7 @@ import transform.MegaPhase import reporting.{Message, NoExplanation} /** This object provides useful extension methods for types defined elsewhere */ -object Decorators { 
+object Decorators: /** Extension methods for toType/TermName methods on PreNames. */ @@ -68,13 +68,11 @@ object Decorators { * works like find but avoids Option, replacing None with NoSymbol. */ extension (it: Iterator[Symbol]) - final def findSymbol(p: Symbol => Boolean): Symbol = { - while (it.hasNext) { + final def findSymbol(p: Symbol => Boolean): Symbol = + while (it.hasNext) val sym = it.next() if (p(sym)) return sym - } NoSymbol - } inline val MaxFilterRecursions = 10 @@ -83,32 +81,28 @@ object Decorators { */ extension [T](xs: List[T]) - final def mapconserve[U](f: T => U): List[U] = { + final def mapconserve[U](f: T => U): List[U] = @tailrec def loop(mapped: ListBuffer[U] | Null, unchanged: List[U], pending: List[T]): List[U] = if (pending.isEmpty) if (mapped == null) unchanged else mapped.prependToList(unchanged) - else { + else val head0 = pending.head val head1 = f(head0) if (head1.asInstanceOf[AnyRef] eq head0.asInstanceOf[AnyRef]) loop(mapped, unchanged, pending.tail) - else { + else val b = if (mapped == null) new ListBuffer[U] else mapped var xc = unchanged - while (xc ne pending) { + while (xc ne pending) b += xc.head xc = xc.tail - } b += head1 val tail0 = pending.tail loop(b, tail0.asInstanceOf[List[U]], tail0) - } - } loop(null, xs.asInstanceOf[List[U]], xs) - } /** Like `xs filter p` but returns list `xs` itself - instead of a copy - * if `p` is true for all elements. 
@@ -151,13 +145,12 @@ object Decorators { */ def zipWithConserve[U, V <: T](ys: List[U])(f: (T, U) => V): List[V] = if (xs.isEmpty || ys.isEmpty) Nil - else { + else val x1 = f(xs.head, ys.head) val xs1 = xs.tail.zipWithConserve(ys.tail)(f) if (x1.asInstanceOf[AnyRef] eq xs.head.asInstanceOf[AnyRef]) && (xs1 eq xs.tail) then xs.asInstanceOf[List[V]] else x1 :: xs1 - } /** Like `xs.lazyZip(xs.indices).map(f)`, but returns list `xs` itself * - instead of a copy - if function `f` maps all elements of @@ -191,23 +184,20 @@ object Decorators { /** True if two lists have the same length. Since calling length on linear sequences * is Θ(n), it is an inadvisable way to test length equality. This method is Θ(n min m). */ - final def hasSameLengthAs[U](ys: List[U]): Boolean = { + final def hasSameLengthAs[U](ys: List[U]): Boolean = @tailrec def loop(xs: List[T], ys: List[U]): Boolean = if (xs.isEmpty) ys.isEmpty else ys.nonEmpty && loop(xs.tail, ys.tail) loop(xs, ys) - } - @tailrec final def eqElements(ys: List[AnyRef]): Boolean = xs match { + @tailrec final def eqElements(ys: List[AnyRef]): Boolean = xs match case x :: _ => - ys match { + ys match case y :: _ => x.asInstanceOf[AnyRef].eq(y) && xs.tail.eqElements(ys.tail) case _ => false - } case nil => ys.isEmpty - } /** Union on lists seen as sets */ def setUnion (ys: List[T]): List[T] = xs ::: ys.filterNot(xs contains _) @@ -243,34 +233,30 @@ object Decorators { * a given phase. See [[config.CompilerCommand#explainAdvanced]] for the * exact meaning of "contains" here. 
*/ - extension (names: List[String]) + extension (names: List[String]) def containsPhase(phase: Phase): Boolean = - names.nonEmpty && { - phase match { + names.nonEmpty `&&`: + phase match case phase: MegaPhase => phase.miniPhases.exists(x => names.containsPhase(x)) case _ => names exists { name => - name == "all" || { + name == "all" `||`: val strippedName = name.stripSuffix("+") val logNextPhase = name != strippedName phase.phaseName.startsWith(strippedName) || (logNextPhase && phase.prev.phaseName.startsWith(strippedName)) - } } - } - } extension [T](x: T) def showing[U]( op: WrappedResult[U] ?=> String, - printer: config.Printers.Printer = config.Printers.default)(using c: Conversion[T, U] | Null = null): T = { + printer: config.Printers.Printer = config.Printers.default)(using c: Conversion[T, U] | Null = null): T = // either the use of `$result` was driven by the expected type of `Shown` // which led to the summoning of `Conversion[T, Shown]` (which we'll invoke) // or no such conversion was found so we'll consume the result as it is instead val obj = if c == null then x.asInstanceOf[U] else c(x) printer.println(op(using WrappedResult(obj))) x - } /** Instead of `toString` call `show` on `Showable` values, falling back to `toString` if an exception is raised. 
*/ def tryToShow(using Context): String = x match @@ -287,14 +273,12 @@ object Decorators { def className: String = x.getClass.getSimpleName.nn extension [T](x: T) - def assertingErrorsReported(using Context): T = { + def assertingErrorsReported(using Context): T = assert(ctx.reporter.errorsReported) x - } - def assertingErrorsReported(msg: Message)(using Context): T = { + def assertingErrorsReported(msg: Message)(using Context): T = assert(ctx.reporter.errorsReported, msg) x - } extension [T <: AnyRef](xs: ::[T]) def derivedCons(x1: T, xs1: List[T]) = @@ -315,4 +299,3 @@ object Decorators { extension [T <: AnyRef](arr: Array[T]) def binarySearch(x: T | Null): Int = java.util.Arrays.binarySearch(arr.asInstanceOf[Array[Object | Null]], x) -} diff --git a/compiler/src/dotty/tools/dotc/core/Definitions.scala b/compiler/src/dotty/tools/dotc/core/Definitions.scala index 3d11ad80733e..24026aa1faa3 100644 --- a/compiler/src/dotty/tools/dotc/core/Definitions.scala +++ b/compiler/src/dotty/tools/dotc/core/Definitions.scala @@ -20,7 +20,7 @@ import cc.{CapturingType, CaptureSet, EventuallyCapturingType} import scala.annotation.tailrec -object Definitions { +object Definitions: /** The maximum number of elements in a tuple or product. * This should be removed once we go to hlists. @@ -35,12 +35,11 @@ object Definitions { * else without affecting the set of programs that can be compiled. */ val MaxImplementedFunctionArity: Int = MaxTupleArity -} /** A class defining symbols and types of standard definitions * */ -class Definitions { +class Definitions: import Definitions._ private var initCtx: Context = _ @@ -70,9 +69,9 @@ class Definitions { // NOTE: Ideally we would write `parentConstrs: => Type*` but SIP-24 is only // implemented in Dotty and not in Scala 2. // See . 
- private def enterSpecialPolyClass(name: TypeName, paramFlags: FlagSet, parentConstrs: => Seq[Type]): ClassSymbol = { - val completer = new LazyType { - def complete(denot: SymDenotation)(using Context): Unit = { + private def enterSpecialPolyClass(name: TypeName, paramFlags: FlagSet, parentConstrs: => Seq[Type]): ClassSymbol = + val completer = new LazyType: + def complete(denot: SymDenotation)(using Context): Unit = val cls = denot.asClass.classSymbol val paramDecls = newScope val typeParam = enterSyntheticTypeParam(cls, paramFlags, paramDecls) @@ -81,10 +80,7 @@ class Definitions { else tpe val parents = parentConstrs.toList map instantiate denot.info = ClassInfo(ScalaPackageClass.thisType, cls, parents, paramDecls) - } - } newPermanentClassSymbol(ScalaPackageClass, name, Artifact, completer).entered - } /** The trait FunctionN and ContextFunctionN for some N * @param name The name of the trait to be created @@ -108,10 +104,10 @@ class Definitions { * * type ImpureXYZFunctionN[-T0,...,-T{N-1}, +R] = {cap} XYZFunctionN[T0,...,T{N-1}, R] */ - private def newFunctionNType(name: TypeName): Symbol = { + private def newFunctionNType(name: TypeName): Symbol = val impure = name.startsWith("Impure") - val completer = new LazyType { - def complete(denot: SymDenotation)(using Context): Unit = { + val completer = new LazyType: + def complete(denot: SymDenotation)(using Context): Unit = val arity = name.functionArity if impure then val argParamNames = List.tabulate(arity)(tpnme.syntheticTypeParamName) @@ -138,13 +134,10 @@ class Definitions { decls.enter(newMethod(cls, nme.apply, methodType(argParamRefs, resParamRef), Deferred)) denot.info = ClassInfo(ScalaPackageClass.thisType, cls, ObjectType :: Nil, decls) - } - } if impure then newPermanentSymbol(ScalaPackageClass, name, EmptyFlags, completer) else newPermanentClassSymbol(ScalaPackageClass, name, Trait | NoInits, completer) - } private def newMethod(cls: ClassSymbol, name: TermName, info: Type, flags: FlagSet = 
EmptyFlags): TermSymbol = newPermanentSymbol(cls, name, flags | Method, info).asTerm @@ -169,34 +162,30 @@ class Definitions { resultTypeFn: PolyType => Type, flags: FlagSet = EmptyFlags, bounds: TypeBounds = TypeBounds.empty, - useCompleter: Boolean = false) = { + useCompleter: Boolean = false) = val tparamNames = PolyType.syntheticParamNames(typeParamCount) val tparamInfos = tparamNames map (_ => bounds) def ptype = PolyType(tparamNames)(_ => tparamInfos, resultTypeFn) val info = if (useCompleter) - new LazyType { + new LazyType: def complete(denot: SymDenotation)(using Context): Unit = denot.info = ptype - } else ptype enterMethod(cls, name, info, flags) - } private def enterT1ParameterlessMethod(cls: ClassSymbol, name: TermName, resultTypeFn: PolyType => Type, flags: FlagSet) = enterPolyMethod(cls, name, 1, resultTypeFn, flags) - private def mkArityArray(name: String, arity: Int, countFrom: Int): Array[TypeRef | Null] = { + private def mkArityArray(name: String, arity: Int, countFrom: Int): Array[TypeRef | Null] = val arr = new Array[TypeRef | Null](arity + 1) for (i <- countFrom to arity) arr(i) = requiredClassRef(name + i) arr - } - private def completeClass(cls: ClassSymbol, ensureCtor: Boolean = true): ClassSymbol = { + private def completeClass(cls: ClassSymbol, ensureCtor: Boolean = true): ClassSymbol = if (ensureCtor) ensureConstructor(cls, cls.denot.asClass, EmptyScope) if (cls.linkedClass.exists) cls.linkedClass.markAbsent() cls - } @tu lazy val RootClass: ClassSymbol = newPackageSymbol( NoSymbol, nme.ROOT, (root, rootcls) => ctx.base.rootLoader(root)).moduleClass.asClass @@ -213,14 +202,13 @@ class Definitions { @tu lazy val ScalaPackageVal: TermSymbol = requiredPackage(nme.scala) @tu lazy val ScalaMathPackageVal: TermSymbol = requiredPackage("scala.math") - @tu lazy val ScalaPackageClass: ClassSymbol = { + @tu lazy val ScalaPackageClass: ClassSymbol = val cls = ScalaPackageVal.moduleClass.asClass cls.info.decls.openForMutations.useSynthesizer( name 
=> if (name.isTypeName && name.isSyntheticFunction) newFunctionNType(name.asTypeName) else NoSymbol) cls - } @tu lazy val ScalaPackageObject: Symbol = requiredModule("scala.package") @tu lazy val ScalaRuntimePackageVal: TermSymbol = requiredPackage("scala.runtime") @tu lazy val ScalaRuntimePackageClass: ClassSymbol = ScalaRuntimePackageVal.moduleClass.asClass @@ -308,10 +296,10 @@ class Definitions { Final, bounds = TypeBounds.lower(AnyClass.thisType)) - def AnyMethods: List[TermSymbol] = List(Any_==, Any_!=, Any_equals, Any_hashCode, + def AnyMethods: List[TermSymbol] = List(Any_==, Any_!=, Any_equals, Any_hashCode, Any_toString, Any_##, Any_getClass, Any_isInstanceOf, Any_asInstanceOf, Any_typeTest, Any_typeCast) - @tu lazy val ObjectClass: ClassSymbol = { + @tu lazy val ObjectClass: ClassSymbol = val cls = requiredClass("java.lang.Object") assert(!cls.isCompleted, "race for completing java.lang.Object") cls.info = ClassInfo(cls.owner.thisType, cls, List(AnyType, MatchableType), newScope) @@ -321,7 +309,6 @@ class Definitions { val companion = JavaLangPackageVal.info.decl(nme.Object).symbol.asTerm NamerOps.makeConstructorCompanion(companion, cls) cls - } def ObjectType: TypeRef = ObjectClass.typeRef /** A type alias of Object used to represent any reference to Object in a Java @@ -443,13 +430,12 @@ class Definitions { @tu lazy val pureMethods: List[TermSymbol] = List(Any_==, Any_!=, Any_equals, Any_hashCode, Any_toString, Any_##, Any_getClass, Any_isInstanceOf, Any_typeTest, Object_eq, Object_ne) - @tu lazy val AnyKindClass: ClassSymbol = { + @tu lazy val AnyKindClass: ClassSymbol = val cls = newCompleteClassSymbol(ScalaPackageClass, tpnme.AnyKind, AbstractFinal | Permanent, Nil, newScope(0)) if (!ctx.settings.YnoKindPolymorphism.value) // Enable kind-polymorphism by exposing scala.AnyKind cls.entered cls - } def AnyKindType: TypeRef = AnyKindClass.typeRef @tu lazy val andType: TypeSymbol = enterBinaryAlias(tpnme.AND, AndType(_, _)) @@ -462,11 +448,10 @@ class 
Definitions { @tu lazy val NothingClass: ClassSymbol = enterCompleteClassSymbol( ScalaPackageClass, tpnme.Nothing, AbstractFinal, List(AnyType)) def NothingType: TypeRef = NothingClass.typeRef - @tu lazy val NullClass: ClassSymbol = { + @tu lazy val NullClass: ClassSymbol = // When explicit-nulls is enabled, Null becomes a direct subtype of Any and Matchable val parents = if ctx.explicitNulls then AnyType :: MatchableType :: Nil else ObjectType :: Nil enterCompleteClassSymbol(ScalaPackageClass, tpnme.Null, AbstractFinal, parents) - } def NullType: TypeRef = NullClass.typeRef @tu lazy val InvokerModule = requiredModule("scala.runtime.coverage.Invoker") @@ -572,7 +557,7 @@ class Definitions { case List(pt) => pt.isRef(BooleanClass) case _ => false }).symbol - @tu lazy val Boolean_!= : Symbol = + @tu lazy val Boolean_!= : Symbol = BooleanClass.info.member(nme.NE).suchThat(_.info.firstParamTypes match { case List(pt) => pt.isRef(BooleanClass) case _ => false @@ -690,36 +675,30 @@ class Definitions { @tu lazy val JavaFormattableClass: ClassSymbol = requiredClass("java.util.Formattable") @tu lazy val JavaRecordClass: Symbol = getClassIfDefined("java.lang.Record") - @tu lazy val JavaEnumClass: ClassSymbol = { + @tu lazy val JavaEnumClass: ClassSymbol = val cls = requiredClass("java.lang.Enum") // jl.Enum has a single constructor protected(name: String, ordinal: Int). // We remove the arguments from the primary constructor, and enter // a new constructor symbol with 2 arguments, so that both // `X extends jl.Enum[X]` and `X extends jl.Enum[X](name, ordinal)` // pass typer and go through jl.Enum-specific checks in RefChecks. 
- cls.infoOrCompleter match { + cls.infoOrCompleter match case completer: ClassfileLoader => - cls.info = new ClassfileLoader(completer.classfile) { - override def complete(root: SymDenotation)(using Context): Unit = { + cls.info = new ClassfileLoader(completer.classfile): + override def complete(root: SymDenotation)(using Context): Unit = super.complete(root) val constr = cls.primaryConstructor - val noArgInfo = constr.info match { + val noArgInfo = constr.info match case info: PolyType => - info.resType match { + info.resType match case meth: MethodType => info.derivedLambdaType( resType = meth.derivedLambdaType( paramNames = Nil, paramInfos = Nil)) - } - } val argConstr = constr.copy().entered constr.info = noArgInfo constr.termRef.recomputeDenot() - } - } cls - } - } def JavaEnumType = JavaEnumClass.typeRef @tu lazy val MethodHandleClass: ClassSymbol = requiredClass("java.lang.invoke.MethodHandle") @@ -1087,10 +1066,9 @@ class Definitions { def RepeatedParamType: TypeRef = RepeatedParamClass.typeRef - def ClassType(arg: Type)(using Context): Type = { + def ClassType(arg: Type)(using Context): Type = val ctype = ClassClass.typeRef if (ctx.phase.erasedTypes) ctype else ctype.appliedTo(arg) - } /** The enumeration type, goven a value of the enumeration */ def EnumType(sym: Symbol)(using Context): TypeRef = @@ -1100,14 +1078,14 @@ class Definitions { // - .linkedClass: the ClassSymbol of the enumeration (class E) sym.owner.linkedClass.typeRef - object FunctionOf { + object FunctionOf: def apply(args: List[Type], resultType: Type, isContextual: Boolean = false)(using Context): Type = val mt = MethodType.companion(isContextual, false)(args, resultType) if mt.hasErasedParams then RefinedType(ErasedFunctionClass.typeRef, nme.apply, mt) else FunctionType(args.length, isContextual).appliedTo(args ::: resultType :: Nil) - def unapply(ft: Type)(using Context): Option[(List[Type], Type, Boolean)] = { + def unapply(ft: Type)(using Context): Option[(List[Type], Type, 
Boolean)] = ft.dealias match case RefinedType(parent, nme.apply, mt: MethodType) if isErasedFunctionType(parent) => Some(mt.paramInfos, mt.resType, mt.isContextualMethod) @@ -1118,47 +1096,38 @@ class Definitions { if (targs.isEmpty) None else Some(targs.init, targs.last, tsym.name.isContextFunction) else None - } - } - object PartialFunctionOf { + object PartialFunctionOf: def apply(arg: Type, result: Type)(using Context): Type = PartialFunctionClass.typeRef.appliedTo(arg :: result :: Nil) def unapply(pft: Type)(using Context): Option[(Type, List[Type])] = - if (pft.isRef(PartialFunctionClass)) { + if (pft.isRef(PartialFunctionClass)) val targs = pft.dealias.argInfos if (targs.length == 2) Some((targs.head, targs.tail)) else None - } else None - } - object ArrayOf { + object ArrayOf: def apply(elem: Type)(using Context): Type = if (ctx.erasedTypes) JavaArrayType(elem) else ArrayType.appliedTo(elem :: Nil) - def unapply(tp: Type)(using Context): Option[Type] = tp.dealias match { + def unapply(tp: Type)(using Context): Option[Type] = tp.dealias match case AppliedType(at, arg :: Nil) if at.isRef(ArrayType.symbol) => Some(arg) case JavaArrayType(tp) if ctx.erasedTypes => Some(tp) case _ => None - } - } - object MatchCase { + object MatchCase: def apply(pat: Type, body: Type)(using Context): Type = MatchCaseClass.typeRef.appliedTo(pat, body) - def unapply(tp: Type)(using Context): Option[(Type, Type)] = tp match { + def unapply(tp: Type)(using Context): Option[(Type, Type)] = tp match case AppliedType(tycon, pat :: body :: Nil) if tycon.isRef(MatchCaseClass) => Some((pat, body)) case _ => None - } - def isInstance(tp: Type)(using Context): Boolean = tp match { + def isInstance(tp: Type)(using Context): Boolean = tp match case AppliedType(tycon: TypeRef, _) => tycon.name == tpnme.MatchCase && // necessary pre-filter to avoid forcing symbols tycon.isRef(MatchCaseClass) case _ => false - } - } /** An extractor for multi-dimensional arrays. 
* Note that this will also extract the high bound if an @@ -1170,22 +1139,19 @@ class Definitions { * * MultiArrayOf(, 2) */ - object MultiArrayOf { + object MultiArrayOf: def apply(elem: Type, ndims: Int)(using Context): Type = if (ndims == 0) elem else ArrayOf(apply(elem, ndims - 1)) - def unapply(tp: Type)(using Context): Option[(Type, Int)] = tp match { + def unapply(tp: Type)(using Context): Option[(Type, Int)] = tp match case ArrayOf(elemtp) => - def recur(elemtp: Type): Option[(Type, Int)] = elemtp.dealias match { + def recur(elemtp: Type): Option[(Type, Int)] = elemtp.dealias match case tp @ TypeBounds(lo, hi @ MultiArrayOf(finalElemTp, n)) => Some(finalElemTp, n) case MultiArrayOf(finalElemTp, n) => Some(finalElemTp, n + 1) case _ => Some(elemtp, 1) - } recur(elemtp) case _ => None - } - } /** Extractor for context function types representing by-name parameters, of the form * `() ?=> T`. @@ -1438,7 +1404,7 @@ class Definitions { private def funTypeIdx(isContextual: Boolean, isImpure: Boolean): Int = (if isContextual then 1 else 0) - + (if isImpure then 2 else 0) + + (if isImpure then 2 else 0) private val funTypeArray: IArray[FunType] = val arr = Array.ofDim[FunType](8) @@ -1523,10 +1489,9 @@ class Definitions { * - FunctionN for N >= 22 * - ContextFunctionN for N >= 22 */ - def isXXLFunctionClass(cls: Symbol): Boolean = { + def isXXLFunctionClass(cls: Symbol): Boolean = val name = scalaClassName(cls) (name eq tpnme.FunctionXXL) || name.functionArity > MaxImplementedFunctionArity - } /** Is a synthetic function class * - FunctionN for N > 22 @@ -1644,34 +1609,29 @@ class Definitions { * * @return true if the dealiased type of `tp` is `TupleN[T1, T2, ..., Tn]` */ - def isTupleNType(tp: Type)(using Context): Boolean = { + def isTupleNType(tp: Type)(using Context): Boolean = val tp1 = tp.dealias val arity = tp1.argInfos.length - arity <= MaxTupleArity && { + arity <= MaxTupleArity `&&`: val tupletp = TupleType(arity) tupletp != null && 
tp1.isRef(tupletp.symbol) - } - } - def tupleType(elems: List[Type]): Type = { + def tupleType(elems: List[Type]): Type = val arity = elems.length if 0 < arity && arity <= MaxTupleArity then val tupletp = TupleType(arity) if tupletp != null then tupletp.appliedTo(elems) else TypeOps.nestedPairs(elems) else TypeOps.nestedPairs(elems) - } - def tupleTypes(tp: Type, bound: Int = Int.MaxValue)(using Context): Option[List[Type]] = { - @tailrec def rec(tp: Type, acc: List[Type], bound: Int): Option[List[Type]] = tp.normalized.dealias match { + def tupleTypes(tp: Type, bound: Int = Int.MaxValue)(using Context): Option[List[Type]] = + @tailrec def rec(tp: Type, acc: List[Type], bound: Int): Option[List[Type]] = tp.normalized.dealias match case _ if bound < 0 => Some(acc.reverse) case tp: AppliedType if PairClass == tp.classSymbol => rec(tp.args(1), tp.args.head :: acc, bound - 1) case tp: AppliedType if isTupleNType(tp) => Some(acc.reverse ::: tp.args) case tp: TermRef if tp.symbol == defn.EmptyTupleModule => Some(acc.reverse) case _ => None - } rec(tp.stripTypeVar, Nil, bound) - } def isSmallGenericTuple(tp: Type)(using Context): Boolean = if tp.derivesFrom(defn.PairClass) && !defn.isTupleNType(tp.widenDealias) then @@ -1846,11 +1806,10 @@ class Definitions { case _ => None /* Returns a list of erased booleans marking whether parameters are erased, for a function type. 
*/ - def erasedFunctionParameters(tp: Type)(using Context): List[Boolean] = tp.dealias match { + def erasedFunctionParameters(tp: Type)(using Context): List[Boolean] = tp.dealias match case RefinedType(parent, nme.apply, mt: MethodType) => mt.erasedParams case tp if isFunctionType(tp) => List.fill(functionArity(tp)) { false } case _ => Nil - } def isErasedFunctionType(tp: Type)(using Context): Boolean = tp.derivesFrom(defn.ErasedFunctionClass) @@ -1860,13 +1819,12 @@ class Definitions { (sym `eq` SomeClass) || isTupleClass(sym) /** If `cls` is Tuple1..Tuple22, add the corresponding *: type as last parent to `parents` */ - def adjustForTuple(cls: ClassSymbol, tparams: List[TypeSymbol], parents: List[Type]): List[Type] = { + def adjustForTuple(cls: ClassSymbol, tparams: List[TypeSymbol], parents: List[Type]): List[Type] = if !isTupleClass(cls) then parents else if tparams.isEmpty then parents :+ TupleTypeRef else assert(parents.head.typeSymbol == ObjectClass) TypeOps.nestedPairs(tparams.map(_.typeRef)) :: parents.tail - } /** If it is BoxedUnit, remove `java.io.Serializable` from `parents`. 
*/ def adjustForBoxedUnit(cls: ClassSymbol, parents: List[Type]): List[Type] = @@ -1956,17 +1914,14 @@ class Definitions { // ----- primitive value class machinery ------------------------------------------ - class PerRun[T](generate: Context ?=> T) { + class PerRun[T](generate: Context ?=> T): private var current: RunId = NoRunId private var cached: T = _ - def apply()(using Context): T = { - if (current != ctx.runId) { + def apply()(using Context): T = + if (current != ctx.runId) cached = generate current = ctx.runId - } cached - } - } @tu lazy val ScalaNumericValueTypeList: List[TypeRef] = List( ByteType, ShortType, CharType, IntType, LongType, FloatType, DoubleType) @@ -1988,7 +1943,7 @@ class Definitions { // private val javaTypeToValueTypeRef = mutable.Map[Class[?], TypeRef]() // private val valueTypeNamesToJavaType = mutable.Map[TypeName, Class[?]]() - private def valueTypeRef(name: String, jtype: Class[?], enc: Int, tag: Name): TypeRef = { + private def valueTypeRef(name: String, jtype: Class[?], enc: Int, tag: Name): TypeRef = val vcls = requiredClassRef(name) valueTypeEnc(vcls.name) = enc typeTags(vcls.name) = tag @@ -1996,7 +1951,6 @@ class Definitions { // javaTypeToValueTypeRef(jtype) = vcls // valueTypeNamesToJavaType(vcls.name) = jtype vcls - } /** The type of the boxed class corresponding to primitive value type `tp`. */ def boxedType(tp: Type)(using Context): TypeRef = { @@ -2013,7 +1967,7 @@ class Definitions { else sys.error(s"Not a primitive value type: $tp") }.typeRef - def unboxedType(tp: Type)(using Context): TypeRef = { + def unboxedType(tp: Type)(using Context): TypeRef = val cls = tp.classSymbol if (cls eq BoxedByteClass) ByteType else if (cls eq BoxedShortClass) ShortType @@ -2025,7 +1979,6 @@ class Definitions { else if (cls eq BoxedUnitClass) UnitType else if (cls eq BoxedBooleanClass) BooleanType else sys.error(s"Not a boxed primitive value type: $tp") - } /** The JVM tag for `tp` if it's a primitive, `java.lang.Object` otherwise. 
*/ def typeTag(tp: Type)(using Context): Name = typeTags(scalaClassName(tp)) @@ -2094,16 +2047,14 @@ class Definitions { private var isInitialized = false - def init()(using Context): Unit = { + def init()(using Context): Unit = this.initCtx = ctx - if (!isInitialized) { + if (!isInitialized) // force initialization of every symbol that is synthesized or hijacked by the compiler val forced = syntheticCoreClasses ++ syntheticCoreMethods ++ ScalaValueClasses() :+ JavaEnumClass isInitialized = true - } addSyntheticSymbolsComments - } /** Definitions used in Lazy Vals implementation */ val LazyValsModuleName = "scala.runtime.LazyVals" @@ -2494,4 +2445,3 @@ class Definitions { | * See also [[https://docs.scala-lang.org/sips/42.type.html SIP-23 about Literal-based Singleton Types]]. | */ """.stripMargin) -} diff --git a/compiler/src/dotty/tools/dotc/core/DenotTransformers.scala b/compiler/src/dotty/tools/dotc/core/DenotTransformers.scala index 6690cae3a142..db47638a5117 100644 --- a/compiler/src/dotty/tools/dotc/core/DenotTransformers.scala +++ b/compiler/src/dotty/tools/dotc/core/DenotTransformers.scala @@ -9,7 +9,7 @@ import Symbols._ import Denotations._ import Phases._ -object DenotTransformers { +object DenotTransformers: /** A transformer group contains a sequence of transformers, * ordered by the phase where they apply. 
Transformers are added @@ -17,7 +17,7 @@ object DenotTransformers { */ /** A transformer transforms denotations at a given phase */ - trait DenotTransformer extends Phase { + trait DenotTransformer extends Phase: /** The last phase during which the transformed denotations are valid */ def lastPhaseId(using Context): Int = ctx.base.nextDenotTransformerId(id + 1) @@ -28,55 +28,46 @@ object DenotTransformers { /** The transformation method */ def transform(ref: SingleDenotation)(using Context): SingleDenotation - } /** A transformer that only transforms the info field of denotations */ - trait InfoTransformer extends DenotTransformer { + trait InfoTransformer extends DenotTransformer: def transformInfo(tp: Type, sym: Symbol)(using Context): Type - def transform(ref: SingleDenotation)(using Context): SingleDenotation = { + def transform(ref: SingleDenotation)(using Context): SingleDenotation = val sym = ref.symbol if (sym.exists && !infoMayChange(sym)) ref - else { + else val info1 = transformInfo(ref.info, ref.symbol) if (info1 eq ref.info) ref - else ref match { + else ref match case ref: SymDenotation => ref.copySymDenotation(info = info1).copyCaches(ref, ctx.phase.next) case _ => ref.derivedSingleDenotation(ref.symbol, info1) - } - } - } /** Denotations with a symbol where `infoMayChange` is false are guaranteed to be * unaffected by this transform, so `transformInfo` need not be run. This * can save time, and more importantly, can help avoid forcing symbol completers. */ protected def infoMayChange(sym: Symbol)(using Context): Boolean = true - } /** A transformer that only transforms SymDenotations. * Note: Infos of non-sym denotations are left as is. So the transformer should * be used before erasure only if this is not a problem. After erasure, all * denotations are SymDenotations, so SymTransformers can be used freely. 
*/ - trait SymTransformer extends DenotTransformer { + trait SymTransformer extends DenotTransformer: def transformSym(sym: SymDenotation)(using Context): SymDenotation - def transform(ref: SingleDenotation)(using Context): SingleDenotation = ref match { + def transform(ref: SingleDenotation)(using Context): SingleDenotation = ref match case ref: SymDenotation => transformSym(ref) case _ => ref - } - } /** A `DenotTransformer` trait that has the identity as its `transform` method. * You might want to inherit from this trait so that new denotations can be * installed using `installAfter` and `enteredAfter` at the end of the phase. */ - trait IdentityDenotTransformer extends DenotTransformer { + trait IdentityDenotTransformer extends DenotTransformer: def transform(ref: SingleDenotation)(using Context): SingleDenotation = ref - } -} diff --git a/compiler/src/dotty/tools/dotc/core/Denotations.scala b/compiler/src/dotty/tools/dotc/core/Denotations.scala index 82368fd4dbf5..c0b6df4fd4e2 100644 --- a/compiler/src/dotty/tools/dotc/core/Denotations.scala +++ b/compiler/src/dotty/tools/dotc/core/Denotations.scala @@ -68,14 +68,14 @@ import collection.mutable.ListBuffer * annotations: List[Annotation] * ClassDenotation A denotation representing a single class definition. */ -object Denotations { +object Denotations: implicit def eqDenotation: CanEqual[Denotation, Denotation] = CanEqual.derived /** A PreDenotation represents a group of single denotations or a single multi-denotation * It is used as an optimization to avoid forming MultiDenotations too eagerly. 
*/ - abstract class PreDenotation { + abstract class PreDenotation: /** A denotation in the group exists */ def exists: Boolean @@ -129,14 +129,12 @@ object Denotations { /** The denotation with info(s) as seen from prefix type */ def asSeenFrom(pre: Type)(using Context): AsSeenFromResult = - if (Config.cacheAsSeenFrom) { - if ((cachedPrefix ne pre) || ctx.period != validAsSeenFrom) { + if (Config.cacheAsSeenFrom) + if ((cachedPrefix ne pre) || ctx.period != validAsSeenFrom) cachedAsSeenFrom = computeAsSeenFrom(pre) cachedPrefix = pre validAsSeenFrom = if (pre.isProvisional) Nowhere else ctx.period - } cachedAsSeenFrom - } else computeAsSeenFrom(pre) protected def computeAsSeenFrom(pre: Type)(using Context): AsSeenFromResult @@ -146,7 +144,6 @@ object Denotations { if (!this.exists) that else if (!that.exists) this else DenotUnion(this, that) - } /** A denotation is the result of resolving * a name (either simple identifier or select) during a given period. @@ -175,19 +172,17 @@ object Denotations { * * @param symbol The referencing symbol, or NoSymbol is none exists */ - abstract class Denotation(val symbol: Symbol, protected var myInfo: Type, val isType: Boolean) extends PreDenotation with printing.Showable { + abstract class Denotation(val symbol: Symbol, protected var myInfo: Type, val isType: Boolean) extends PreDenotation with printing.Showable: type AsSeenFromResult <: Denotation /** The type info. * The info is an instance of TypeType iff this is a type denotation * Uncompleted denotations set myInfo to a LazyType. 
*/ - final def info(using Context): Type = { - def completeInfo = { // Written this way so that `info` is small enough to be inlined + final def info(using Context): Type = + def completeInfo = // Written this way so that `info` is small enough to be inlined this.asInstanceOf[SymDenotation].completeFrom(myInfo.asInstanceOf[LazyType]); info - } if (myInfo.isInstanceOf[LazyType]) completeInfo else myInfo - } /** The type info, or, if this is a SymDenotation where the symbol * is not yet completed, the completer @@ -227,10 +222,9 @@ object Denotations { private var myValidFor: Period = Nowhere final def validFor: Period = myValidFor - final def validFor_=(p: Period): Unit = { + final def validFor_=(p: Period): Unit = myValidFor = p symbol.invalidateDenotCache() - } /** Is this denotation different from NoDenotation or an ErrorDenotation? */ def exists: Boolean = true @@ -279,10 +273,9 @@ object Denotations { * single-denotations that do not satisfy the predicate are left alone * (whereas suchThat would map them to NoDenotation). */ - inline def disambiguate(inline p: Symbol => Boolean)(using Context): SingleDenotation = this match { + inline def disambiguate(inline p: Symbol => Boolean)(using Context): SingleDenotation = this match case sdenot: SingleDenotation => sdenot case mdenot => suchThat(p) orElse NoQualifyingRef(alternatives) - } /** Return symbol in this denotation that satisfies the given predicate. * if generateStubs is specified, return a stubsymbol if denotation is a missing ref. 
@@ -296,7 +289,7 @@ object Denotations { generateStubs: Boolean = true) (p: Symbol => Boolean) (using Context): Symbol = - disambiguate(p) match { + disambiguate(p) match case m @ MissingRef(ownerd, name) if generateStubs => if ctx.settings.YdebugMissingRefs.value then m.ex.printStackTrace() newStubSymbol(ownerd.symbol, name, source) @@ -308,61 +301,51 @@ object Denotations { throw TypeError(msg) case denot => denot.symbol - } - def requiredMethod(pname: PreName)(using Context): TermSymbol = { + def requiredMethod(pname: PreName)(using Context): TermSymbol = val name = pname.toTermName info.member(name).requiredSymbol("method", name, this)(_.is(Method)).asTerm - } def requiredMethodRef(name: PreName)(using Context): TermRef = requiredMethod(name).termRef - def requiredMethod(pname: PreName, argTypes: List[Type])(using Context): TermSymbol = { + def requiredMethod(pname: PreName, argTypes: List[Type])(using Context): TermSymbol = val name = pname.toTermName info.member(name).requiredSymbol("method", name, this, argTypes) { x => - x.is(Method) && { - x.info.paramInfoss match { + x.is(Method) `&&`: + x.info.paramInfoss match case paramInfos :: Nil => paramInfos.corresponds(argTypes)(_ =:= _) case _ => false - } - } }.asTerm - } def requiredMethodRef(name: PreName, argTypes: List[Type])(using Context): TermRef = requiredMethod(name, argTypes).termRef - def requiredValue(pname: PreName)(using Context): TermSymbol = { + def requiredValue(pname: PreName)(using Context): TermSymbol = val name = pname.toTermName info.member(name).requiredSymbol("field or getter", name, this)(_.info.isParameterless).asTerm - } def requiredValueRef(name: PreName)(using Context): TermRef = requiredValue(name).termRef - def requiredClass(pname: PreName)(using Context): ClassSymbol = { + def requiredClass(pname: PreName)(using Context): ClassSymbol = val name = pname.toTypeName info.member(name).requiredSymbol("class", name, this)(_.isClass).asClass - } - def requiredType(pname: PreName)(using 
Context): TypeSymbol = { + def requiredType(pname: PreName)(using Context): TypeSymbol = val name = pname.toTypeName info.member(name).requiredSymbol("type", name, this)(_.isType).asType - } /** The alternative of this denotation that has a type matching `targetType` when seen * as a member of type `site` and that has a target name matching `targetName`, or * `NoDenotation` if none exists. */ - def matchingDenotation(site: Type, targetType: Type, targetName: Name)(using Context): SingleDenotation = { + def matchingDenotation(site: Type, targetType: Type, targetName: Name)(using Context): SingleDenotation = def qualifies(sym: Symbol) = site.memberInfo(sym).matchesLoosely(targetType) && sym.hasTargetName(targetName) if (isOverloaded) - atSignature(targetType.signature, targetName, site, relaxed = true) match { + atSignature(targetType.signature, targetName, site, relaxed = true) match case sd: SingleDenotation => sd.matchingDenotation(site, targetType, targetName) case md => md.suchThat(qualifies(_)) - } else if (exists && !qualifies(symbol)) NoDenotation else asSingleDenotation - } /** Form a denotation by conjoining with denotation `that`. * @@ -393,22 +376,20 @@ object Denotations { * 5. The symbol's visibility is strictly greater than the other one's. * 6. The symbol is a method, but the other one is not. */ - def meet(that: Denotation, pre: Type, safeIntersection: Boolean = false)(using Context): Denotation = { + def meet(that: Denotation, pre: Type, safeIntersection: Boolean = false)(using Context): Denotation = /** Try to merge denot1 and denot2 without adding a new signature. 
*/ - def mergeDenot(denot1: Denotation, denot2: SingleDenotation): Denotation = denot1 match { + def mergeDenot(denot1: Denotation, denot2: SingleDenotation): Denotation = denot1 match case denot1 @ MultiDenotation(denot11, denot12) => val d1 = mergeDenot(denot11, denot2) if (d1.exists) denot1.derivedUnionDenotation(d1, denot12) - else { + else val d2 = mergeDenot(denot12, denot2) if (d2.exists) denot1.derivedUnionDenotation(denot11, d2) else NoDenotation - } case denot1: SingleDenotation => if (denot1 eq denot2) denot1 else if denot1.matches(denot2) then mergeSingleDenot(denot1, denot2) else NoDenotation - } /** Try to merge single-denotations. */ def mergeSingleDenot(denot1: SingleDenotation, denot2: SingleDenotation): Denotation = @@ -494,14 +475,12 @@ object Denotations { if (this eq that) this else if (!this.exists) that else if (!that.exists) this - else that match { + else that match case that: SingleDenotation => val r = mergeDenot(this, that) if (r.exists) r else MultiDenotation(this, that) case that @ MultiDenotation(denot1, denot2) => this.meet(denot1, pre).meet(denot2, pre) - } - } final def asSingleDenotation: SingleDenotation = asInstanceOf[SingleDenotation] final def asSymDenotation: SymDenotation = asInstanceOf[SymDenotation] @@ -512,7 +491,6 @@ object Denotations { final def toDenot(pre: Type)(using Context): Denotation = this final def containsSym(sym: Symbol): Boolean = hasUniqueSym && (symbol eq sym) - } // ------ Info meets ---------------------------------------------------- @@ -573,7 +551,7 @@ object Denotations { end infoMeet /** A non-overloaded denotation */ - abstract class SingleDenotation(symbol: Symbol, initInfo: Type, isType: Boolean) extends Denotation(symbol, initInfo, isType) { + abstract class SingleDenotation(symbol: Symbol, initInfo: Type, isType: Boolean) extends Denotation(symbol, initInfo, isType): protected def newLikeThis(symbol: Symbol, info: Type, pre: Type, isRefinedMethod: Boolean): SingleDenotation final def name(using 
Context): Name = symbol.name @@ -687,13 +665,12 @@ object Denotations { */ def initial: SingleDenotation = if (validFor.firstPhaseId <= 1) this - else { + else var current = nextInRun while (current.validFor.code > this.validFor.code) current = current.nextInRun current - } - def history: List[SingleDenotation] = { + def history: List[SingleDenotation] = val b = new ListBuffer[SingleDenotation] var current = initial while ({ @@ -703,12 +680,11 @@ object Denotations { }) () b.toList - } /** Invalidate all caches and fields that depend on base classes and their contents */ def invalidateInheritedInfo(): Unit = () - private def updateValidity()(using Context): this.type = { + private def updateValidity()(using Context): this.type = assert( ctx.runId >= validFor.runId || ctx.settings.YtestPickler.value // mixing test pickler with debug printing can travel back in time @@ -724,7 +700,6 @@ object Denotations { }) () this - } /** Move validity period of this denotation to a new run. Throw a StaleSymbol error * if denotation is no longer valid. @@ -736,8 +711,8 @@ object Denotations { * - If the symbol did not have a denotation that was defined at the current phase * return a NoDenotation instead. */ - private def bringForward()(using Context): SingleDenotation = { - this match { + private def bringForward()(using Context): SingleDenotation = + this match case symd: SymDenotation => if (stillValid(symd)) return updateValidity() if acceptStale(symd) && symd.initial.validFor.firstPhaseId <= ctx.lastPhaseId then @@ -745,26 +720,22 @@ object Denotations { // visible at all. TabCompleteTests have examples where this happens. 
return symd.currentSymbol.denot.orElse(symd).updateValidity() case _ => - } if (!symbol.exists) return updateValidity() if (!coveredInterval.containsPhaseId(ctx.phaseId)) return NoDenotation if (ctx.debug) traceInvalid(this) staleSymbolError - } /** The next defined denotation (following `nextInRun`) or an arbitrary * undefined denotation, if all denotations in a `nextinRun` cycle are * undefined. */ - private def nextDefined: SingleDenotation = { + private def nextDefined: SingleDenotation = var p1 = this var p2 = nextInRun - while (p1.validFor == Nowhere && (p1 ne p2)) { + while (p1.validFor == Nowhere && (p1 ne p2)) p1 = p1.nextInRun p2 = p2.nextInRun.nextInRun - } p1 - } /** Skip any denotations that have been removed by an installAfter or that * are otherwise undefined. @@ -882,48 +853,41 @@ object Denotations { * It's placed here because it needs access to private fields of SingleDenotation. * @pre Can only be called in `phase.next`. */ - protected def installAfter(phase: DenotTransformer)(using Context): Unit = { + protected def installAfter(phase: DenotTransformer)(using Context): Unit = val targetId = phase.next.id if (ctx.phaseId != targetId) atPhase(phase.next)(installAfter(phase)) - else { + else val current = symbol.current // println(s"installing $this after $phase/${phase.id}, valid = ${current.validFor}") // printPeriods(current) this.validFor = Period(ctx.runId, targetId, current.validFor.lastPhaseId) if (current.validFor.firstPhaseId >= targetId) current.replaceWith(this) - else { + else current.validFor = Period(ctx.runId, current.validFor.firstPhaseId, targetId - 1) insertAfter(current) - } - } // printPeriods(this) - } /** Apply a transformation `f` to all denotations in this group that start at or after * given phase. Denotations are replaced while keeping the same validity periods. 
*/ - protected def transformAfter(phase: DenotTransformer, f: SymDenotation => SymDenotation)(using Context): Unit = { + protected def transformAfter(phase: DenotTransformer, f: SymDenotation => SymDenotation)(using Context): Unit = var current = symbol.current while (current.validFor.firstPhaseId < phase.id && (current.nextInRun.validFor.code > current.validFor.code)) current = current.nextInRun var hasNext = true - while ((current.validFor.firstPhaseId >= phase.id) && hasNext) { + while ((current.validFor.firstPhaseId >= phase.id) && hasNext) val current1: SingleDenotation = f(current.asSymDenotation) - if (current1 ne current) { + if (current1 ne current) current1.validFor = current.validFor current.replaceWith(current1) - } hasNext = current1.nextInRun.validFor.code > current1.validFor.code current = current1.nextInRun - } - } /** Insert this denotation so that it follows `prev`. */ - private def insertAfter(prev: SingleDenotation) = { + private def insertAfter(prev: SingleDenotation) = this.nextInRun = prev.nextInRun prev.nextInRun = this - } /** Insert this denotation instead of `old`. * Also ensure that `old` refers with `nextInRun` to this denotation @@ -934,7 +898,7 @@ object Denotations { * The code to achieve this is subtle in that it works correctly * whether the replaced denotation is the only one in its cycle or not. */ - private[dotc] def replaceWith(newd: SingleDenotation): Unit = { + private[dotc] def replaceWith(newd: SingleDenotation): Unit = var prev = this while (prev.nextInRun ne this) prev = prev.nextInRun // order of next two assignments is important! 
@@ -942,23 +906,20 @@ object Denotations { newd.nextInRun = nextInRun validFor = Nowhere nextInRun = newd - } def staleSymbolError(using Context): Nothing = throw new StaleSymbol(staleSymbolMsg) - def staleSymbolMsg(using Context): String = { - def ownerMsg = this match { + def staleSymbolMsg(using Context): String = + def ownerMsg = this match case denot: SymDenotation => s"in ${denot.owner}" case _ => "" - } s"stale symbol; $this#${symbol.id} $ownerMsg, defined in ${validFor}, is referred to in run ${ctx.period}" - } /** The period (interval of phases) for which there exists * a valid denotation in this flock. */ - def coveredInterval(using Context): Period = { + def coveredInterval(using Context): Period = var cur = this var cnt = 0 var interval = validFor @@ -971,7 +932,6 @@ object Denotations { }) () interval - } /** Show declaration string; useful for showing declarations * as seen from subclasses. @@ -982,7 +942,7 @@ object Denotations { if (symbol == NoSymbol) symbol.toString else s"" - def definedPeriodsString: String = { + def definedPeriodsString: String = var sb = new StringBuilder() var cur = this var cnt = 0 @@ -995,7 +955,6 @@ object Denotations { }) () sb.toString - } // ------ PreDenotation ops ---------------------------------------------- @@ -1026,7 +985,7 @@ object Denotations { case FullMatch => !alwaysCompareTypes || info.matches(other.info) case MethodNotAMethodMatch => - !ctx.erasedTypes && { + !ctx.erasedTypes `&&`: // A Scala zero-parameter method and a Scala non-method always match. 
if !thisLanguage.isJava && !otherLanguage.isJava then true @@ -1039,7 +998,6 @@ object Denotations { symbol.is(Method) else // otherLanguage.isJava other.symbol.is(Method) - } case ParamMatch => // The signatures do not tell us enough to be sure about matching !ctx.erasedTypes && info.matches(other.info) @@ -1067,12 +1025,11 @@ object Denotations { type AsSeenFromResult = SingleDenotation - protected def computeAsSeenFrom(pre: Type)(using Context): SingleDenotation = { + protected def computeAsSeenFrom(pre: Type)(using Context): SingleDenotation = val symbol = this.symbol - val owner = this match { + val owner = this match case thisd: SymDenotation => thisd.owner case _ => if (symbol.exists) symbol.owner else NoSymbol - } /** The derived denotation with the given `info` transformed with `asSeenFrom`. * @@ -1115,7 +1072,6 @@ object Denotations { then derivedSingleDenotation(symbol, normalizedArgBounds(arg.bounds), pre) else derived(symbol.info) else derived(symbol.info) - } /** The argument bounds, possibly intersected with the parameter's info TypeBounds, * if the latter is not F-bounded and does not refer to other type parameters @@ -1136,18 +1092,16 @@ object Denotations { if tp.symbol.isAllOf(ClassTypeParam) && tp.symbol.owner == cls => true case _ => foldOver(x, tp) acc(false, symbol.info) - } abstract class NonSymSingleDenotation(symbol: Symbol, initInfo: Type, override val prefix: Type) - extends SingleDenotation(symbol, initInfo, initInfo.isInstanceOf[TypeType]) { + extends SingleDenotation(symbol, initInfo, initInfo.isInstanceOf[TypeType]): def infoOrCompleter: Type = initInfo - } class UniqueRefDenotation( symbol: Symbol, initInfo: Type, initValidFor: Period, - prefix: Type) extends NonSymSingleDenotation(symbol, initInfo, prefix) { + prefix: Type) extends NonSymSingleDenotation(symbol, initInfo, prefix): validFor = initValidFor override def hasUniqueSym: Boolean = true protected def newLikeThis(s: Symbol, i: Type, pre: Type, isRefinedMethod: Boolean): 
SingleDenotation = @@ -1155,34 +1109,30 @@ object Denotations { new JointRefDenotation(s, i, validFor, pre, isRefinedMethod) else new UniqueRefDenotation(s, i, validFor, pre) - } class JointRefDenotation( symbol: Symbol, initInfo: Type, initValidFor: Period, prefix: Type, - override val isRefinedMethod: Boolean) extends NonSymSingleDenotation(symbol, initInfo, prefix) { + override val isRefinedMethod: Boolean) extends NonSymSingleDenotation(symbol, initInfo, prefix): validFor = initValidFor override def hasUniqueSym: Boolean = false protected def newLikeThis(s: Symbol, i: Type, pre: Type, isRefinedMethod: Boolean): SingleDenotation = new JointRefDenotation(s, i, validFor, pre, isRefinedMethod) - } - class ErrorDenotation(using Context) extends NonSymSingleDenotation(NoSymbol, NoType, NoType) { + class ErrorDenotation(using Context) extends NonSymSingleDenotation(NoSymbol, NoType, NoType): override def exists: Boolean = false override def hasUniqueSym: Boolean = false validFor = Period.allInRun(ctx.runId) protected def newLikeThis(s: Symbol, i: Type, pre: Type, isRefinedMethod: Boolean): SingleDenotation = this - } /** An error denotation that provides more info about the missing reference. * Produced by staticRef, consumed by requiredSymbol. */ - case class MissingRef(val owner: SingleDenotation, name: Name)(using Context) extends ErrorDenotation { + case class MissingRef(val owner: SingleDenotation, name: Name)(using Context) extends ErrorDenotation: val ex: Exception = new Exception // DEBUG - } /** An error denotation that provides more info about alternatives * that were found but that do not qualify. 
@@ -1199,7 +1149,7 @@ object Denotations { // --- Overloaded denotations and predenotations ------------------------------------------------- - trait MultiPreDenotation extends PreDenotation { + trait MultiPreDenotation extends PreDenotation: def denot1: PreDenotation def denot2: PreDenotation @@ -1221,9 +1171,8 @@ object Denotations { protected def derivedUnion(denot1: PreDenotation, denot2: PreDenotation) = if ((denot1 eq this.denot1) && (denot2 eq this.denot2)) this else denot1 union denot2 - } - final case class DenotUnion(denot1: PreDenotation, denot2: PreDenotation) extends MultiPreDenotation { + final case class DenotUnion(denot1: PreDenotation, denot2: PreDenotation) extends MultiPreDenotation: def exists: Boolean = true def toDenot(pre: Type)(using Context): Denotation = denot1.toDenot(pre).meet(denot2.toDenot(pre), pre) @@ -1232,11 +1181,10 @@ object Denotations { type AsSeenFromResult = PreDenotation def computeAsSeenFrom(pre: Type)(using Context): PreDenotation = derivedUnion(denot1.asSeenFrom(pre), denot2.asSeenFrom(pre)) - } /** An overloaded denotation consisting of the alternatives of both given denotations. 
*/ - case class MultiDenotation(denot1: Denotation, denot2: Denotation) extends Denotation(NoSymbol, NoType, isType = false) with MultiPreDenotation { + case class MultiDenotation(denot1: Denotation, denot2: Denotation) extends Denotation(NoSymbol, NoType, isType = false) with MultiPreDenotation: validFor = denot1.validFor & denot2.validFor final def infoOrCompleter: Type = multiHasNot("info") @@ -1252,7 +1200,7 @@ object Denotations { derivedUnionDenotation(denot1.current, denot2.current) def altsWith(p: Symbol => Boolean): List[SingleDenotation] = denot1.altsWith(p) ++ denot2.altsWith(p) - def suchThat(p: Symbol => Boolean)(using Context): SingleDenotation = { + def suchThat(p: Symbol => Boolean)(using Context): SingleDenotation = val sd1 = denot1.suchThat(p) val sd2 = denot2.suchThat(p) if sd1.exists then @@ -1263,18 +1211,16 @@ object Denotations { | ${denot2.symbol.showLocated}: ${denot2.info}""") else sd1 else sd2 - } override def filterWithPredicate(p: SingleDenotation => Boolean): Denotation = derivedUnionDenotation(denot1.filterWithPredicate(p), denot2.filterWithPredicate(p)) def hasAltWith(p: SingleDenotation => Boolean): Boolean = denot1.hasAltWith(p) || denot2.hasAltWith(p) - def accessibleFrom(pre: Type, superAccess: Boolean)(using Context): Denotation = { + def accessibleFrom(pre: Type, superAccess: Boolean)(using Context): Denotation = val d1 = denot1 accessibleFrom (pre, superAccess) val d2 = denot2 accessibleFrom (pre, superAccess) if (!d1.exists) d2 else if (!d2.exists) d1 else derivedUnionDenotation(d1, d2) - } def mapInfo(f: Type => Type)(using Context): Denotation = derivedUnionDenotation(denot1.mapInfo(f), denot2.mapInfo(f)) def derivedUnionDenotation(d1: Denotation, d2: Denotation): Denotation = @@ -1290,33 +1236,29 @@ object Denotations { private def multiHasNot(op: String): Nothing = throw new UnsupportedOperationException( s"multi-denotation with alternatives $alternatives does not implement operation $op") - } /** The current denotation 
of the static reference given by path, * or a MissingRef or NoQualifyingRef instance, if it does not exist. * if generateStubs is set, generates stubs for missing top-level symbols */ - def staticRef(path: Name, generateStubs: Boolean = true, isPackage: Boolean = false)(using Context): Denotation = { - def select(prefix: Denotation, selector: Name): Denotation = { + def staticRef(path: Name, generateStubs: Boolean = true, isPackage: Boolean = false)(using Context): Denotation = + def select(prefix: Denotation, selector: Name): Denotation = val owner = prefix.disambiguate(_.info.isParameterless) def isPackageFromCoreLibMissing: Boolean = // if the scala package is missing, the stdlib must be missing owner.symbol == defn.RootClass && selector == nme.scala - if (owner.exists) { + if (owner.exists) val result = if (isPackage) owner.info.decl(selector) else owner.info.member(selector) if (result.exists) result else if (isPackageFromCoreLibMissing) throw new MissingCoreLibraryException(selector.toString) - else { + else val alt = if (generateStubs) missingHook(owner.symbol.moduleClass, selector) else NoSymbol if (alt.exists) alt.denot else MissingRef(owner, selector) - } - } else owner - } - def recur(path: Name, wrap: TermName => Name = identity): Denotation = path match { + def recur(path: Name, wrap: TermName => Name = identity): Denotation = path match case path: TypeName => recur(path.toTermName, n => n.toTypeName) case ModuleClassName(underlying) => @@ -1326,7 +1268,7 @@ object Denotations { case qn @ AnyQualifiedName(prefix, _) => recur(prefix, n => wrap(qn.info.mkString(n).toTermName)) case path: SimpleName => - def recurSimple(len: Int, wrap: TermName => Name): Denotation = { + def recurSimple(len: Int, wrap: TermName => Name): Denotation = val point = path.lastIndexOf('.', len - 1) val selector = wrap(path.slice(point + 1, len).asTermName) val prefix = @@ -1334,14 +1276,11 @@ object Denotations { else if (selector.isTermName) defn.RootClass.denot else 
defn.EmptyPackageClass.denot select(prefix, selector) - } recurSimple(path.length, wrap) - } val run = ctx.run if run == null then recur(path) else run.staticRefs.getOrElseUpdate(path, recur(path)) - } /** If we are looking for a non-existing term name in a package, * assume it is a package for which we do not have a directory and @@ -1354,8 +1293,6 @@ object Denotations { NoSymbol /** An exception for accessing symbols that are no longer valid in current run */ - class StaleSymbol(msg: => String) extends Exception { + class StaleSymbol(msg: => String) extends Exception: util.Stats.record("stale symbol") override def getMessage(): String = msg - } -} diff --git a/compiler/src/dotty/tools/dotc/core/Flags.scala b/compiler/src/dotty/tools/dotc/core/Flags.scala index 8100bea374eb..6574ff4040a7 100644 --- a/compiler/src/dotty/tools/dotc/core/Flags.scala +++ b/compiler/src/dotty/tools/dotc/core/Flags.scala @@ -1,9 +1,9 @@ package dotty.tools.dotc package core -object Flags { +object Flags: - object opaques { + object opaques: /** A FlagSet represents a set of flags. 
Flags are encoded as follows: * The first two bits indicate whether a flag set applies to terms, @@ -17,12 +17,11 @@ object Flags { /** A flag set consisting of a single flag */ opaque type Flag <: FlagSet = Long private[Flags] def Flag(bits: Long): Flag = bits - } export opaques.FlagSet type Flag = opaques.Flag - extension (x: FlagSet) { + extension (x: FlagSet) inline def bits: Long = opaques.toBits(x) @@ -34,22 +33,20 @@ object Flags { def | (y: FlagSet): FlagSet = if (x.bits == 0) y else if (y.bits == 0) x - else { + else val tbits = x.bits & y.bits & KINDFLAGS if (tbits == 0) assert(false, s"illegal flagset combination: ${x.flagsString} and ${y.flagsString}") FlagSet(tbits | ((x.bits | y.bits) & ~KINDFLAGS)) - } /** The intersection of the given flag sets */ def & (y: FlagSet): FlagSet = FlagSet(x.bits & y.bits) /** The intersection of a flag set with the complement of another flag set */ - def &~ (y: FlagSet): FlagSet = { + def &~ (y: FlagSet): FlagSet = val tbits = x.bits & KINDFLAGS if ((tbits & y.bits) == 0) x else FlagSet(tbits | ((x.bits & ~y.bits) & ~KINDFLAGS)) - } def ^ (y: FlagSet) = FlagSet((x.bits | y.bits) & KINDFLAGS | (x.bits ^ y.bits) & ~KINDFLAGS) @@ -57,10 +54,9 @@ object Flags { /** Does the given flag set contain the given flag? * This means that both the kind flags and the carrier bits have non-empty intersection. */ - def is (flag: Flag): Boolean = { + def is (flag: Flag): Boolean = val fs = x.bits & flag.bits (fs & KINDFLAGS) != 0 && (fs & ~KINDFLAGS) != 0 - } /** Does the given flag set contain the given flag * and at the same time contain none of the flags in the `butNot` set? @@ -70,10 +66,9 @@ object Flags { /** Does the given flag set have a non-empty intersection with another flag set? * This means that both the kind flags and the carrier bits have non-empty intersection. 
*/ - def isOneOf (flags: FlagSet): Boolean = { + def isOneOf (flags: FlagSet): Boolean = val fs = x.bits & flags.bits (fs & KINDFLAGS) != 0 && (fs & ~KINDFLAGS) != 0 - } /** Does the given flag set have a non-empty intersection with another flag set, * and at the same time contain none of the flags in the `butNot` set? @@ -83,11 +78,10 @@ object Flags { /** Does a given flag set have all of the flags of another flag set? * Pre: The intersection of the term/type flags of both sets must be non-empty. */ - def isAllOf (flags: FlagSet): Boolean = { + def isAllOf (flags: FlagSet): Boolean = val fs = x.bits & flags.bits ((fs & KINDFLAGS) != 0 || flags.bits == 0) && (fs >>> TYPESHIFT) == (flags.bits >>> TYPESHIFT) - } /** Does a given flag set have all of the flags in another flag set * and at the same time contain none of the flags in the `butNot` set? @@ -124,33 +118,29 @@ object Flags { /** The list of non-empty names of flags with given index idx that are set in the given flag set */ private def flagString(idx: Int): List[String] = if ((x.bits & (1L << idx)) == 0) Nil - else { + else def halfString(kind: Int) = if ((x.bits & (1L << kind)) != 0) flagName(idx)(kind) else "" val termFS = halfString(TERMindex) val typeFS = halfString(TYPEindex) val strs = termFS :: (if (termFS == typeFS) Nil else typeFS :: Nil) strs filter (_.nonEmpty) - } /** The list of non-empty names of flags that are set in the given flag set */ - def flagStrings(privateWithin: String = ""): Seq[String] = { + def flagStrings(privateWithin: String = ""): Seq[String] = var rawStrings = (2 to MaxFlag).flatMap(x.flagString(_)) // DOTTY problem: cannot drop with (_) if (!privateWithin.isEmpty && !x.is(Protected)) rawStrings = rawStrings :+ "private" val scopeStr = if (x.is(Local)) "this" else privateWithin if (scopeStr != "") - rawStrings.filter(_ != "").map { + rawStrings.filter(_ != "").map: case "private" => s"private[$scopeStr]" case "protected" => s"protected[$scopeStr]" case str => str - } else 
rawStrings - } /** The string representation of the given flag set */ def flagsString: String = x.flagStrings("").mkString(" ") - } // Temporary while extension names are in flux def or(x1: FlagSet, x2: FlagSet) = x1 | x2 @@ -181,12 +171,11 @@ object Flags { if (isDefinedAsFlag(idx)) bits | (1L << idx) else bits)) /** The union of all flags in given flag set */ - def union(flagss: FlagSet*): FlagSet = { + def union(flagss: FlagSet*): FlagSet = var flag = EmptyFlags for (f <- flagss) flag |= f flag - } def commonFlags(flagss: FlagSet*): FlagSet = union(flagss.map(_.toCommonFlags): _*) @@ -202,12 +191,11 @@ object Flags { * @param name The name to be used for the term flag * @param typeName The name to be used for the type flag, if it is different from `name`. */ - private def newFlags(index: Int, name: String, typeName: String = ""): (Flag, Flag, Flag) = { + private def newFlags(index: Int, name: String, typeName: String = ""): (Flag, Flag, Flag) = flagName(index)(TERMindex) = name flagName(index)(TYPEindex) = if (typeName.isEmpty) name else typeName val bits = 1L << index (opaques.Flag(KINDFLAGS | bits), opaques.Flag(TERMS | bits), opaques.Flag(TYPES | bits)) - } // ----------------- Available flags ----------------------------------------------------- @@ -610,4 +598,3 @@ object Flags { val SyntheticParam: FlagSet = Synthetic | Param val SyntheticTermParam: FlagSet = Synthetic | TermParam val SyntheticTypeParam: FlagSet = Synthetic | TypeParam -} diff --git a/compiler/src/dotty/tools/dotc/core/GadtConstraint.scala b/compiler/src/dotty/tools/dotc/core/GadtConstraint.scala index bb65cce84042..e5f6e8b6a0ca 100644 --- a/compiler/src/dotty/tools/dotc/core/GadtConstraint.scala +++ b/compiler/src/dotty/tools/dotc/core/GadtConstraint.scala @@ -99,8 +99,8 @@ class GadtConstraint private ( def externalize(tp: Type, theMap: TypeMap | Null = null)(using Context): Type = tp match case param: TypeParamRef => reverseMapping(param) match - case sym: Symbol => sym.typeRef - case 
null => param + case sym: Symbol => sym.typeRef + case null => param case tp: TypeAlias => tp.derivedAlias(externalize(tp.alias, theMap)) case tp => (if theMap == null then ExternalizeMap() else theMap).mapOver(tp) @@ -123,16 +123,14 @@ class GadtConstraint private ( else ntTvar case _ => tp - private def containsNoInternalTypes(tp: Type, theAcc: TypeAccumulator[Boolean] | Null = null)(using Context): Boolean = tp match { + private def containsNoInternalTypes(tp: Type, theAcc: TypeAccumulator[Boolean] | Null = null)(using Context): Boolean = tp match case tpr: TypeParamRef => !reverseMapping.contains(tpr) case tv: TypeVar => !reverseMapping.contains(tv.origin) case tp => (if (theAcc != null) theAcc else new ContainsNoInternalTypesAccumulator()).foldOver(true, tp) - } - private class ContainsNoInternalTypesAccumulator(using Context) extends TypeAccumulator[Boolean] { + private class ContainsNoInternalTypesAccumulator(using Context) extends TypeAccumulator[Boolean]: override def apply(x: Boolean, tp: Type): Boolean = x && containsNoInternalTypes(tp, this) - } override def toText(printer: Printer): Texts.Text = printer.toText(this) @@ -150,7 +148,7 @@ end GadtConstraint object GadtState: def apply(gadt: GadtConstraint): GadtState = ProperGadtState(gadt) -sealed trait GadtState { +sealed trait GadtState: this: ConstraintHandling => // Hide ConstraintHandling within GadtConstraintHandling def gadt: GadtConstraint @@ -166,7 +164,7 @@ sealed trait GadtState { * @see [[ConstraintHandling.addToConstraint]] */ def addToConstraint(sym: Symbol)(using Context): Boolean = addToConstraint(sym :: Nil) - def addToConstraint(params: List[Symbol])(using Context): Boolean = { + def addToConstraint(params: List[Symbol])(using Context): Boolean = import NameKinds.DepParamName val poly1 = PolyType(params.map { sym => DepParamName.fresh(sym.name.toTypeName) })( @@ -174,7 +172,7 @@ sealed trait GadtState { // In bound type `tp`, replace the symbols in dependent positions with their 
internal TypeParamRefs. // The replaced symbols will be later picked up in `ConstraintHandling#addToConstraint` // and used as orderings. - def substDependentSyms(tp: Type, isUpper: Boolean)(using Context): Type = { + def substDependentSyms(tp: Type, isUpper: Boolean)(using Context): Type = def loop(tp: Type) = substDependentSyms(tp, isUpper) tp match case tp @ AndType(tp1, tp2) if !isUpper => @@ -189,7 +187,6 @@ sealed trait GadtState { case _ => tp case i => pt.paramRefs(i) case tp => tp - } val tb = param.info.bounds tb.derivedTypeBounds( @@ -209,10 +206,9 @@ sealed trait GadtState { // The replaced symbols are picked up here. addToConstraint(poly1, tvars) .showing(i"added to constraint: [$poly1] $params%, % gadt = $gadt", gadts) - } /** Further constrain a symbol already present in the constraint. */ - def addBound(sym: Symbol, bound: Type, isUpper: Boolean)(using Context): Boolean = { + def addBound(sym: Symbol, bound: Type, isUpper: Boolean)(using Context): Boolean = val symTvar: TypeVar = gadt.stripInternalTypeVar(gadt.tvarOrError(sym)) match case tv: TypeVar => tv case inst => @@ -231,18 +227,16 @@ sealed trait GadtState { case bound => addBoundTransitively(symTvar.origin, bound, isUpper) - gadts.println { + gadts.println: val descr = if isUpper then "upper" else "lower" val op = if isUpper then "<:" else ">:" i"adding $descr bound $sym $op $bound = $result" - } if constraint ne saved then gadt = gadt.withWasConstrained result - } /** See [[ConstraintHandling.approximation]] */ - def approximation(sym: Symbol, fromBelow: Boolean, maxLevel: Int = Int.MaxValue)(using Context): Type = { + def approximation(sym: Symbol, fromBelow: Boolean, maxLevel: Int = Int.MaxValue)(using Context): Type = approximation(gadt.tvarOrError(sym).origin, fromBelow, maxLevel).match case tpr: TypeParamRef => // Here we do externalization when the returned type is a TypeParamRef, @@ -250,8 +244,7 @@ sealed trait GadtState { // the type variable is instantiated. See #15531. 
gadt.externalize(tpr) case tp => tp - .showing(i"approximating $sym ~> $result", gadts) - } + .showing(i"approximating $sym ~> $result", gadts) def fresh: GadtState = GadtState(gadt) @@ -280,7 +273,6 @@ sealed trait GadtState { // ---- Debug ------------------------------------------------------------ override def constr = gadtsConstr -} // Hide ConstraintHandling within GadtState private class ProperGadtState(private var myGadt: GadtConstraint) extends ConstraintHandling with GadtState: diff --git a/compiler/src/dotty/tools/dotc/core/Hashable.scala b/compiler/src/dotty/tools/dotc/core/Hashable.scala index 79da5f1dcd6f..bdfedeaae663 100644 --- a/compiler/src/dotty/tools/dotc/core/Hashable.scala +++ b/compiler/src/dotty/tools/dotc/core/Hashable.scala @@ -5,7 +5,7 @@ import Types._ import scala.util.hashing.{ MurmurHash3 => hashing } import annotation.tailrec -object Hashable { +object Hashable: /** A null terminated list of BindingTypes. We use `null` here for efficiency */ class SomeBinders(val tp: BindingType, val next: Binders) @@ -13,10 +13,9 @@ object Hashable { type Binders = SomeBinders | Null /** A null terminated list of pairs of BindingTypes. Used for isomorphism tests. 
*/ - class SomeBinderPairs(tp1: BindingType, tp2: BindingType, next: BinderPairs) { + class SomeBinderPairs(tp1: BindingType, tp2: BindingType, next: BinderPairs): @tailrec final def matches(t1: Type, t2: Type): Boolean = (t1 `eq` tp1) && (t2 `eq` tp2) || next != null && next.matches(t1, t2) - } type BinderPairs = SomeBinderPairs | Null @@ -37,9 +36,8 @@ object Hashable { /** An alternative value if computeHash would otherwise yield HashUnknown */ private[core] inline val HashUnknownAlt = 4321 -} -trait Hashable { +trait Hashable: import Hashable._ protected def hashSeed: Int = getClass.hashCode @@ -52,37 +50,32 @@ trait Hashable { def identityHash(bs: Binders): Int = avoidSpecialHashes(System.identityHashCode(this)) - protected def finishHash(bs: Binders, seed: Int, arity: Int, tp: Type): Int = { + protected def finishHash(bs: Binders, seed: Int, arity: Int, tp: Type): Int = val elemHash = typeHash(bs, tp) if (elemHash == NotCached) return NotCached finishHash(hashing.mix(seed, elemHash), arity + 1) - } - protected def finishHash(bs: Binders, seed: Int, arity: Int, tp1: Type, tp2: Type): Int = { + protected def finishHash(bs: Binders, seed: Int, arity: Int, tp1: Type, tp2: Type): Int = val elemHash = typeHash(bs, tp1) if (elemHash == NotCached) return NotCached finishHash(bs, hashing.mix(seed, elemHash), arity + 1, tp2) - } - protected def finishHash(bs: Binders, seed: Int, arity: Int, tps: List[Type]): Int = { + protected def finishHash(bs: Binders, seed: Int, arity: Int, tps: List[Type]): Int = var h = seed var xs = tps var len = arity - while (!xs.isEmpty) { + while (!xs.isEmpty) val elemHash = typeHash(bs, xs.head) if (elemHash == NotCached) return NotCached h = hashing.mix(h, elemHash) xs = xs.tail len += 1 - } finishHash(h, len) - } - protected def finishHash(bs: Binders, seed: Int, arity: Int, tp: Type, tps: List[Type]): Int = { + protected def finishHash(bs: Binders, seed: Int, arity: Int, tp: Type, tps: List[Type]): Int = val elemHash = typeHash(bs, tp) 
if (elemHash == NotCached) return NotCached finishHash(bs, hashing.mix(seed, elemHash), arity + 1, tps) - } protected final def doHash(x: Any): Int = @@ -123,4 +116,3 @@ trait Hashable { if (h == NotCached) NotCachedAlt else if (h == HashUnknown) HashUnknownAlt else h -} diff --git a/compiler/src/dotty/tools/dotc/core/JavaNullInterop.scala b/compiler/src/dotty/tools/dotc/core/JavaNullInterop.scala index 60fc4a4274e0..713cf9735093 100644 --- a/compiler/src/dotty/tools/dotc/core/JavaNullInterop.scala +++ b/compiler/src/dotty/tools/dotc/core/JavaNullInterop.scala @@ -33,7 +33,7 @@ import Types._ * to handle the full spectrum of Scala types. Additionally, some kinds of symbols like constructors and * enum instances get special treatment. */ -object JavaNullInterop { +object JavaNullInterop: /** Transforms the type `tp` of Java member `sym` to be explicitly nullable. * `tp` is needed because the type inside `sym` might not be set when this method is called. @@ -51,7 +51,7 @@ object JavaNullInterop { * * But the selection can throw an NPE if the returned value is `null`. */ - def nullifyMember(sym: Symbol, tp: Type, isEnumValueDef: Boolean)(using Context): Type = { + def nullifyMember(sym: Symbol, tp: Type, isEnumValueDef: Boolean)(using Context): Type = assert(ctx.explicitNulls) assert(sym.is(JavaDefined), "can only nullify java-defined members") @@ -67,7 +67,6 @@ object JavaNullInterop { else // Otherwise, nullify everything nullifyType(tp) - } private def hasNotNullAnnot(sym: Symbol)(using Context): Boolean = ctx.definitions.NotNullAnnots.exists(nna => sym.unforcedAnnotation(nna).isDefined) @@ -95,7 +94,7 @@ object JavaNullInterop { * This is useful for e.g. constructors, and also so that `A & B` is nullified * to `(A & B) | Null`, instead of `(A | Null & B | Null) | Null`. 
*/ - private class JavaNullMap(var outermostLevelAlreadyNullable: Boolean)(using Context) extends TypeMap { + private class JavaNullMap(var outermostLevelAlreadyNullable: Boolean)(using Context) extends TypeMap: /** Should we nullify `tp` at the outermost level? */ def needsNull(tp: Type): Boolean = !outermostLevelAlreadyNullable && (tp match { @@ -113,7 +112,7 @@ object JavaNullInterop { case _ => true }) - override def apply(tp: Type): Type = tp match { + override def apply(tp: Type): Type = tp match case tp: TypeRef if needsNull(tp) => OrNull(tp) case appTp @ AppliedType(tycon, targs) => val oldOutermostNullable = outermostLevelAlreadyNullable @@ -144,6 +143,3 @@ object JavaNullInterop { // In particular, if the type is a ConstantType, then we don't nullify it because it is the // type of a final non-nullable field. case _ => tp - } - } -} diff --git a/compiler/src/dotty/tools/dotc/core/MacroClassLoader.scala b/compiler/src/dotty/tools/dotc/core/MacroClassLoader.scala index d8f41ef99b11..ac9806f9895a 100644 --- a/compiler/src/dotty/tools/dotc/core/MacroClassLoader.scala +++ b/compiler/src/dotty/tools/dotc/core/MacroClassLoader.scala @@ -5,7 +5,7 @@ import dotty.tools.dotc.util.Property import dotty.tools.dotc.reporting.trace import dotty.tools.io.ClassPath -object MacroClassLoader { +object MacroClassLoader: /** A key to be used in a context property that caches the class loader used for macro expansion */ private val MacroClassLoaderKey = new Property.Key[ClassLoader] @@ -18,12 +18,10 @@ object MacroClassLoader { def init(ctx: FreshContext): ctx.type = ctx.setProperty(MacroClassLoaderKey, makeMacroClassLoader(using ctx)) - private def makeMacroClassLoader(using Context): ClassLoader = trace("new macro class loader") { + private def makeMacroClassLoader(using Context): ClassLoader = trace("new macro class loader"): import scala.language.unsafeNulls val entries = ClassPath.expandPath(ctx.settings.classpath.value, expandStar=true) val urls = entries.map(cp => 
java.nio.file.Paths.get(cp).toUri.toURL).toArray val out = Option(ctx.settings.outputDir.value.toURL) // to find classes in case of suspended compilation new java.net.URLClassLoader(urls ++ out.toList, getClass.getClassLoader) - } -} diff --git a/compiler/src/dotty/tools/dotc/core/MatchTypeTrace.scala b/compiler/src/dotty/tools/dotc/core/MatchTypeTrace.scala index 60ebc95e7bed..1ab84f22da9e 100644 --- a/compiler/src/dotty/tools/dotc/core/MatchTypeTrace.scala +++ b/compiler/src/dotty/tools/dotc/core/MatchTypeTrace.scala @@ -28,7 +28,7 @@ object MatchTypeTrace: */ def record(op: Context ?=> Any)(using Context): String = val trace = new MatchTrace - inContext(ctx.fresh.setProperty(MatchTrace, trace)) { + inContext(ctx.fresh.setProperty(MatchTrace, trace)): op if trace.entries.isEmpty then "" else @@ -37,7 +37,6 @@ object MatchTypeTrace: |Note: a match type could not be fully reduced: | |${trace.entries.reverse.map(explainEntry)}%\n%""" - } /** Are we running an operation that records a match type trace? 
*/ def isRecording(using Context): Boolean = diff --git a/compiler/src/dotty/tools/dotc/core/Mode.scala b/compiler/src/dotty/tools/dotc/core/Mode.scala index ea63eb6a419b..aca9bb9cdb53 100644 --- a/compiler/src/dotty/tools/dotc/core/Mode.scala +++ b/compiler/src/dotty/tools/dotc/core/Mode.scala @@ -1,7 +1,7 @@ package dotty.tools.dotc.core /** A collection of mode bits that are part of a context */ -case class Mode(val bits: Int) extends AnyVal { +case class Mode(val bits: Int) extends AnyVal: import Mode._ def | (that: Mode): Mode = Mode(bits | that.bits) def & (that: Mode): Mode = Mode(bits & that.bits) @@ -15,17 +15,15 @@ case class Mode(val bits: Int) extends AnyVal { def ==(that: Mode): Boolean = this.bits == that.bits def !=(that: Mode): Boolean = this.bits != that.bits -} -object Mode { +object Mode: val None: Mode = Mode(0) private val modeName = new Array[String](32) - def newMode(bit: Int, name: String): Mode = { + def newMode(bit: Int, name: String): Mode = modeName(bit) = name Mode(1 << bit) - } val Pattern: Mode = newMode(0, "Pattern") val Type: Mode = newMode(1, "Type") @@ -144,4 +142,3 @@ object Mode { /** We are checking the original call of an Inlined node */ val InlinedCall: Mode = newMode(31, "InlinedCall") -} diff --git a/compiler/src/dotty/tools/dotc/core/NameKinds.scala b/compiler/src/dotty/tools/dotc/core/NameKinds.scala index 2c968ab9446c..bb0ed7462317 100644 --- a/compiler/src/dotty/tools/dotc/core/NameKinds.scala +++ b/compiler/src/dotty/tools/dotc/core/NameKinds.scala @@ -12,7 +12,7 @@ import Decorators._ import scala.annotation.internal.sharable /** Defines possible kinds of NameInfo of a derived name */ -object NameKinds { +object NameKinds: // These are sharable since all NameKinds are created eagerly at the start of the program // before any concurrent threads are forked. 
for this to work, NameKinds should never @@ -23,11 +23,10 @@ object NameKinds { @sharable private val uniqueNameKinds = util.HashMap[String, UniqueNameKind]() /** A class for the info stored in a derived name */ - abstract class NameInfo { + abstract class NameInfo: def kind: NameKind def mkString(underlying: TermName): String def map(f: SimpleName => SimpleName): NameInfo = this - } /** An abstract base class of classes that define the kind of a derived name info */ abstract class NameKind(val tag: Int) { self => @@ -72,7 +71,7 @@ object NameKinds { /** The kind of names that add a simple classification to an underlying name. */ - abstract class ClassifiedNameKind(tag: Int, val infoString: String) extends NameKind(tag) { + abstract class ClassifiedNameKind(tag: Int, val infoString: String) extends NameKind(tag): type ThisInfo = Info val info: Info = new Info @@ -80,38 +79,33 @@ object NameKinds { def apply(underlying: TermName): TermName = underlying.derived(info) /** Extractor operation for names of this kind */ - def unapply(name: DerivedName): Option[TermName] = name match { + def unapply(name: DerivedName): Option[TermName] = name match case DerivedName(underlying, `info`) => Some(underlying) case _ => None - } simpleNameKinds(tag) = this: @unchecked - } /** The kind of names that get formed by adding a prefix to an underlying name */ class PrefixNameKind(tag: Int, prefix: String, optInfoString: String = "") - extends ClassifiedNameKind(tag, if (optInfoString.isEmpty) s"Prefix $prefix" else optInfoString) { + extends ClassifiedNameKind(tag, if (optInfoString.isEmpty) s"Prefix $prefix" else optInfoString): def mkString(underlying: TermName, info: ThisInfo): String = underlying.qualToString(_.toString, n => prefix + n.toString) override def unmangle(name: SimpleName): TermName = if (name.startsWith(prefix)) apply(name.drop(prefix.length).asSimpleName) else name - } /** The kind of names that get formed by appending a suffix to an underlying name */ class 
SuffixNameKind(tag: Int, suffix: String, optInfoString: String = "") - extends ClassifiedNameKind(tag, if (optInfoString.isEmpty) s"Suffix $suffix" else optInfoString) { + extends ClassifiedNameKind(tag, if (optInfoString.isEmpty) s"Suffix $suffix" else optInfoString): def mkString(underlying: TermName, info: ThisInfo): String = underlying.qualToString(_.toString, n => n.toString + suffix) override def unmangle(name: SimpleName): TermName = if (name.endsWith(suffix)) apply(name.take(name.length - suffix.length).asSimpleName) else name - } /** A base trait for infos that define an additional selector name */ - trait QualifiedInfo extends NameInfo { + trait QualifiedInfo extends NameInfo: val name: SimpleName - } /** The kind of qualified names, consisting of an underlying name as a prefix, * followed by a separator, followed by a simple selector name. @@ -119,13 +113,12 @@ object NameKinds { * A qualified names always constitutes a new name, different from its underlying name. */ class QualifiedNameKind(tag: Int, val separator: String) - extends NameKind(tag) { + extends NameKind(tag): type ThisInfo = QualInfo - case class QualInfo(name: SimpleName) extends Info with QualifiedInfo { + case class QualInfo(name: SimpleName) extends Info with QualifiedInfo: override def map(f: SimpleName => SimpleName): NameInfo = new QualInfo(f(name)) override def toString: String = s"$infoString $name" override def hashCode = scala.runtime.ScalaRunTime._hashCode(this) * 31 + kind.hashCode - } def apply(qual: TermName, name: SimpleName): TermName = qual.derived(new QualInfo(name)) @@ -134,15 +127,13 @@ object NameKinds { * Needed because the suffix of an expanded name may itself be expanded. 
* For example, look at javap of scala.App.initCode */ - def apply(qual: TermName, name: TermName): TermName = name replace { + def apply(qual: TermName, name: TermName): TermName = name replace: case name: SimpleName => apply(qual, name) case AnyQualifiedName(_, _) => apply(qual, name.toSimpleName) - } - def unapply(name: DerivedName): Option[(TermName, SimpleName)] = name match { + def unapply(name: DerivedName): Option[(TermName, SimpleName)] = name match case DerivedName(qual, info: this.QualInfo) => Some((qual, info.name)) case _ => None - } override def definesNewName: Boolean = true override def definesQualifiedName: Boolean = true @@ -153,53 +144,44 @@ object NameKinds { def infoString: String = s"Qualified $separator" qualifiedNameKinds(tag) = this: @unchecked - } /** An extractor for qualified names of an arbitrary kind */ - object AnyQualifiedName { - def unapply(name: DerivedName): Option[(TermName, SimpleName)] = name match { + object AnyQualifiedName: + def unapply(name: DerivedName): Option[(TermName, SimpleName)] = name match case DerivedName(qual, info: QualifiedInfo) => Some((name.underlying, info.name)) case _ => None - } - } /** A base trait for infos that contain a number */ - trait NumberedInfo extends NameInfo { + trait NumberedInfo extends NameInfo: def num: Int - } /** The kind of numbered names consisting of an underlying name and a number */ abstract class NumberedNameKind(tag: Int, val infoString: String) extends NameKind(tag) { self => type ThisInfo = NumberedInfo - case class NumberedInfo(val num: Int) extends Info with NameKinds.NumberedInfo { + case class NumberedInfo(val num: Int) extends Info with NameKinds.NumberedInfo: override def toString: String = s"$infoString $num" override def hashCode = scala.runtime.ScalaRunTime._hashCode(this) * 31 + kind.hashCode - } def apply(qual: TermName, num: Int): TermName = qual.derived(new NumberedInfo(num)) - def unapply(name: DerivedName): Option[(TermName, Int)] = name match { + def 
unapply(name: DerivedName): Option[(TermName, Int)] = name match case DerivedName(underlying, info: this.NumberedInfo) => Some((underlying, info.num)) case _ => None - } - protected def skipSeparatorAndNum(name: SimpleName, separator: String): Int = { + protected def skipSeparatorAndNum(name: SimpleName, separator: String): Int = var i = name.length while (i > 0 && name(i - 1).isDigit) i -= 1 if (i > separator.length && i < name.length && name.slice(i - separator.length, i).toString == separator) i else -1 - } numberedNameKinds(tag) = this: @unchecked } /** An extractor for numbered names of arbitrary kind */ - object AnyNumberedName { - def unapply(name: DerivedName): Option[(TermName, Int)] = name match { + object AnyNumberedName: + def unapply(name: DerivedName): Option[(TermName, Int)] = name match case DerivedName(qual, info: NumberedInfo) => Some((qual, info.num)) case _ => None - } - } /** The kind of unique names that consist of an underlying name (can be empty), * a separator indicating the class of unique name, and a unique number. @@ -207,15 +189,14 @@ object NameKinds { * A unique names always constitutes a new name, different from its underlying name. 
*/ case class UniqueNameKind(val separator: String) - extends NumberedNameKind(UNIQUE, s"Unique $separator") { + extends NumberedNameKind(UNIQUE, s"Unique $separator"): override def definesNewName: Boolean = true val separatorName = separator.toTermName - def mkString(underlying: TermName, info: ThisInfo): String = { + def mkString(underlying: TermName, info: ThisInfo): String = val safePrefix = str.sanitize(underlying.toString) + separator safePrefix + info.num - } /** Generate fresh unique term name of this kind with given prefix name */ def fresh(prefix: TermName = EmptyTermName)(using Context): TermName = @@ -226,19 +207,15 @@ object NameKinds { fresh(prefix.toTermName).toTypeName uniqueNameKinds(separator) = this: @unchecked - } /** An extractor for unique names of arbitrary kind */ - object AnyUniqueName { - def unapply(name: DerivedName): Option[(TermName, TermName, Int)] = name match { + object AnyUniqueName: + def unapply(name: DerivedName): Option[(TermName, TermName, Int)] = name match case DerivedName(qual, info: NumberedInfo) => - info.kind match { + info.kind match case unique: UniqueNameKind => Some((qual, unique.separatorName, info.num)) case _ => None - } case _ => None - } - } /** Names of the form `prefix . name` */ val QualifiedName: QualifiedNameKind = new QualifiedNameKind(QUALIFIED, ".") @@ -250,14 +227,14 @@ object NameKinds { val ExpandPrefixName: QualifiedNameKind = new QualifiedNameKind(EXPANDPREFIX, "$") /** Expanded names of the form `prefix $$ name`. 
*/ - val ExpandedName: QualifiedNameKind = new QualifiedNameKind(EXPANDED, str.EXPAND_SEPARATOR) { + val ExpandedName: QualifiedNameKind = new QualifiedNameKind(EXPANDED, str.EXPAND_SEPARATOR): private val FalseSuper = termName("$$super") private val FalseSuperLength = FalseSuper.length - override def unmangle(name: SimpleName): TermName = { + override def unmangle(name: SimpleName): TermName = var i = name.lastIndexOfSlice(str.EXPAND_SEPARATOR) if (i < 0) name - else { + else // Hack to make super accessors from traits work. They would otherwise fail because of #765 // The problem is that in `x$$super$$plus` the expansion prefix needs to be `x` // instead of `x$$super`. @@ -265,18 +242,14 @@ object NameKinds { i -= FalseSuper.length apply(name.take(i).asTermName, name.drop(i + str.EXPAND_SEPARATOR.length).asSimpleName) - } - } - } /** Expanded names of the form `prefix $_setter_$ name`. These only occur in Scala2. */ val TraitSetterName: QualifiedNameKind = new QualifiedNameKind(TRAITSETTER, str.TRAIT_SETTER_SEPARATOR) /** Unique names of the form `prefix $ n` or `$ n $` */ - val UniqueName: UniqueNameKind = new UniqueNameKind("$") { + val UniqueName: UniqueNameKind = new UniqueNameKind("$"): override def mkString(underlying: TermName, info: ThisInfo) = if (underlying.isEmpty) "$" + info.num + "$" else super.mkString(underlying, info) - } /** Other unique names */ val TempResultName: UniqueNameKind = new UniqueNameKind("ev$") @@ -305,17 +278,14 @@ object NameKinds { /** A kind of unique extension methods; Unlike other unique names, these can be * unmangled. 
*/ - val UniqueExtMethName: UniqueNameKind = new UniqueNameKind("$extension") { - override def unmangle(name: SimpleName): TermName = { + val UniqueExtMethName: UniqueNameKind = new UniqueNameKind("$extension"): + override def unmangle(name: SimpleName): TermName = val i = skipSeparatorAndNum(name, separator) - if (i > 0) { + if (i > 0) val index = name.drop(i).toString.toInt val original = name.take(i - separator.length).asTermName apply(original, index) - } else name - } - } /** Kinds of unique names generated by the pattern matcher */ val PatMatStdBinderName: UniqueNameKind = new UniqueNameKind("x") @@ -328,33 +298,27 @@ object NameKinds { val BoundaryName: UniqueNameKind = new UniqueNameKind("boundary") /** The kind of names of default argument getters */ - val DefaultGetterName: NumberedNameKind = new NumberedNameKind(DEFAULTGETTER, "DefaultGetter") { - def mkString(underlying: TermName, info: ThisInfo) = { + val DefaultGetterName: NumberedNameKind = new NumberedNameKind(DEFAULTGETTER, "DefaultGetter"): + def mkString(underlying: TermName, info: ThisInfo) = val prefix = if (underlying.isConstructorName) nme.DEFAULT_GETTER_INIT else underlying prefix.toString + str.DEFAULT_GETTER + (info.num + 1) - } // TODO: Reduce code duplication with UniqueExtMethName - override def unmangle(name: SimpleName): TermName = { + override def unmangle(name: SimpleName): TermName = val i = skipSeparatorAndNum(name, str.DEFAULT_GETTER) - if (i > 0) { + if (i > 0) val index = name.drop(i).toString.toInt - 1 var original = name.take(i - str.DEFAULT_GETTER.length).asTermName if (original == nme.DEFAULT_GETTER_INIT) original = nme.CONSTRUCTOR apply(original, index) - } else name - } - } /** Names of the form N_. Emitted by inliner, replaced by outer path * in ExplicitOuter. 
*/ - val OuterSelectName: NumberedNameKind = new NumberedNameKind(OUTERSELECT, "OuterSelect") { - def mkString(underlying: TermName, info: ThisInfo) = { + val OuterSelectName: NumberedNameKind = new NumberedNameKind(OUTERSELECT, "OuterSelect"): + def mkString(underlying: TermName, info: ThisInfo) = assert(underlying.isEmpty) s"${info.num}_" - } - } val SuperAccessorName: PrefixNameKind = new PrefixNameKind(SUPERACCESSOR, "super$") val InitializerName: PrefixNameKind = new PrefixNameKind(INITIALIZER, "initial$") @@ -369,9 +333,8 @@ object NameKinds { case BothBounds extends AvoidNameKind(AVOIDBOTH, "(avoid)") val BodyRetainerName: SuffixNameKind = new SuffixNameKind(BODYRETAINER, "$retainedBody") - val FieldName: SuffixNameKind = new SuffixNameKind(FIELD, "$$local") { + val FieldName: SuffixNameKind = new SuffixNameKind(FIELD, "$$local"): override def mkString(underlying: TermName, info: ThisInfo) = underlying.toString - } val ExplicitFieldName: SuffixNameKind = new SuffixNameKind(EXPLICITFIELD, "$field") val ExtMethName: SuffixNameKind = new SuffixNameKind(EXTMETH, "$extension") val ParamAccessorName: SuffixNameKind = new SuffixNameKind(PARAMACC, "$accessor") @@ -381,27 +344,24 @@ object NameKinds { val SyntheticSetterName: SuffixNameKind = new SuffixNameKind(SETTER, "_$eq") /** A name together with a signature. Used in Tasty trees. 
*/ - object SignedName extends NameKind(SIGNED) { + object SignedName extends NameKind(SIGNED): - case class SignedInfo(sig: Signature, target: TermName) extends Info { + case class SignedInfo(sig: Signature, target: TermName) extends Info: assert(sig ne Signature.NotAMethod) override def toString: String = val targetStr = if target.isEmpty then "" else s" @$target" s"$infoString $sig$targetStr" override def hashCode = scala.runtime.ScalaRunTime._hashCode(this) * 31 + kind.hashCode - } type ThisInfo = SignedInfo def apply(qual: TermName, sig: Signature, target: TermName): TermName = qual.derived(new SignedInfo(sig, target)) - def unapply(name: DerivedName): Option[(TermName, Signature, TermName)] = name match { + def unapply(name: DerivedName): Option[(TermName, Signature, TermName)] = name match case DerivedName(underlying, info: SignedInfo) => Some((underlying, info.sig, info.target)) case _ => None - } def mkString(underlying: TermName, info: ThisInfo): String = s"$underlying[with sig ${info.sig}]" def infoString: String = "Signed" - } /** Possible name kinds of a method that comes from Scala2 pickling info. * and that need to be unmangled. 
Note: Scala2 protected accessors and setters @@ -414,4 +374,3 @@ object NameKinds { def qualifiedNameKindOfTag : util.ReadOnlyMap[Int, QualifiedNameKind] = qualifiedNameKinds def numberedNameKindOfTag : util.ReadOnlyMap[Int, NumberedNameKind] = numberedNameKinds def uniqueNameKindOfSeparator: util.ReadOnlyMap[String, UniqueNameKind] = uniqueNameKinds -} diff --git a/compiler/src/dotty/tools/dotc/core/NameOps.scala b/compiler/src/dotty/tools/dotc/core/NameOps.scala index 04440c9e9b39..d6193d1b9ee6 100644 --- a/compiler/src/dotty/tools/dotc/core/NameOps.scala +++ b/compiler/src/dotty/tools/dotc/core/NameOps.scala @@ -12,9 +12,9 @@ import Decorators.* import Definitions._ import nme._ -object NameOps { +object NameOps: - object compactify { + object compactify: lazy val md5: MessageDigest = MessageDigest.getInstance("MD5").nn inline val CLASSFILE_NAME_CHAR_LIMIT = 240 @@ -31,14 +31,14 @@ object NameOps { * * (+6 for ".class"). MaxNameLength can therefore be computed as follows: */ - def apply(s: String): String = { + def apply(s: String): String = val marker = "$$$$" val MaxNameLength = (CLASSFILE_NAME_CHAR_LIMIT - 6).min( 2 * (CLASSFILE_NAME_CHAR_LIMIT - 6 - 2 * marker.length - 32) ) - def toMD5(s: String, edge: Int): String = { + def toMD5(s: String, edge: Int): String = val prefix = s.take(edge) val suffix = s.takeRight(edge) @@ -48,19 +48,15 @@ object NameOps { val md5chars = md5.digest().nn.map(b => (b & 0xFF).toHexString).mkString prefix + marker + md5chars + marker + suffix - } if (s.length <= MaxNameLength) s else toMD5(s, MaxNameLength / 4) - } - } - extension [N <: Name](name: N) { + extension [N <: Name](name: N) - def testSimple(f: SimpleName => Boolean): Boolean = name match { + def testSimple(f: SimpleName => Boolean): Boolean = name match case name: SimpleName => f(name) case name: TypeName => name.toTermName.testSimple(f) case _ => false - } private def likeSpacedN(n: Name): N = name.likeSpaced(n).asInstanceOf[N] @@ -85,39 +81,34 @@ object NameOps { 
/** Is name of a variable pattern? */ def isVarPattern: Boolean = testSimple { n => - n.length > 0 && { + n.length > 0 `&&`: def isLowerLetterSupplementary: Boolean = import Character.{isHighSurrogate, isLowSurrogate, isLetter, isLowerCase, isValidCodePoint, toCodePoint} - isHighSurrogate(n(0)) && n.length > 1 && isLowSurrogate(n(1)) && { + isHighSurrogate(n(0)) && n.length > 1 && isLowSurrogate(n(1)) `&&`: val codepoint = toCodePoint(n(0), n(1)) isValidCodePoint(codepoint) && isLetter(codepoint) && isLowerCase(codepoint) - } val first = n.head ((first.isLower && first.isLetter || first == '_' || isLowerLetterSupplementary) && n != false_ && n != true_ && n != null_) - } } || name.is(PatMatGivenVarName) - def isOpAssignmentName: Boolean = name match { + def isOpAssignmentName: Boolean = name match case raw.NE | raw.LE | raw.GE | EMPTY => false case name: SimpleName => name.length > 0 && name.last == '=' && name.head != '=' && isOperatorPart(name.firstCodePoint) case _ => false - } /** is this the name of an object enclosing packagel-level definitions? */ - def isPackageObjectName: Boolean = name match { + def isPackageObjectName: Boolean = name match case name: TermName => name == nme.PACKAGE || name.endsWith(str.TOPLEVEL_SUFFIX) case name: TypeName => - name.toTermName match { + name.toTermName match case ModuleClassName(original) => original.isPackageObjectName case _ => false - } - } /** Convert this module name to corresponding module class name */ def moduleClassName: TypeName = name.derived(ModuleClassName).toTypeName @@ -129,18 +120,16 @@ object NameOps { * method needs to work on mangled as well as unmangled names because * it is also called from the backend. 
*/ - def stripModuleClassSuffix: N = likeSpacedN { + def stripModuleClassSuffix: N = likeSpacedN: val semName = name.toTermName match case name: SimpleName if name.endsWith(str.MODULE_SUFFIX) && name.lastPart != MODULE_SUFFIX => name.unmangleClassName case _ => name semName.exclude(ModuleClassName) - } /** If flags is a ModuleClass but not a Package, add module class suffix */ - def adjustIfModuleClass(flags: FlagSet): N = likeSpacedN { + def adjustIfModuleClass(flags: FlagSet): N = likeSpacedN: if (flags.is(ModuleClass, butNot = Package)) name.asTypeName.moduleClassName else name.toTermName - } /** The expanded name. * This is the fully qualified name of `base` with `ExpandPrefixName` as separator, @@ -150,20 +139,16 @@ object NameOps { likeSpacedN { base.fullNameSeparated(ExpandPrefixName, kind, name) } /** Revert the expanded name. */ - def unexpandedName: N = likeSpacedN { - name.replaceDeep { + def unexpandedName: N = likeSpacedN: + name.replaceDeep: case ExpandedName(_, unexp) => unexp - } - } def errorName: N = likeSpacedN(name ++ nme.ERROR) - def freshened(using Context): N = likeSpacedN { - name.toTermName match { + def freshened(using Context): N = likeSpacedN: + name.toTermName match case ModuleClassName(original) => ModuleClassName(original.freshened) case name => UniqueName.fresh(name) - } - } /** Do two target names match? An empty target name matchws any other name. 
*/ def matchesTargetName(other: Name) = @@ -258,32 +243,29 @@ object NameOps { if suffixStart >= 0 then checkedFunArity(suffixStart) else -1 /** The name of the generic runtime operation corresponding to an array operation */ - def genericArrayOp: TermName = name match { + def genericArrayOp: TermName = name match case nme.apply => nme.array_apply case nme.length => nme.array_length case nme.update => nme.array_update case nme.clone_ => nme.array_clone - } /** The name of the primitive runtime operation corresponding to an array operation */ - def primitiveArrayOp: TermName = name match { + def primitiveArrayOp: TermName = name match case nme.apply => nme.primitive.arrayApply case nme.length => nme.primitive.arrayLength case nme.update => nme.primitive.arrayUpdate case nme.clone_ => nme.clone_ - } /** This method is to be used on **type parameters** from a class, since * this method does sorting based on their names */ - def specializedFor(classTargs: List[Type], classTargsNames: List[Name], methodTargs: List[Type], methodTarsNames: List[Name])(using Context): N = { + def specializedFor(classTargs: List[Type], classTargsNames: List[Name], methodTargs: List[Type], methodTarsNames: List[Name])(using Context): N = val methodTags: Seq[Name] = (methodTargs zip methodTarsNames).sortBy(_._2).map(x => defn.typeTag(x._1)) val classTags: Seq[Name] = (classTargs zip classTargsNames).sortBy(_._2).map(x => defn.typeTag(x._1)) likeSpacedN(name ++ nme.specializedTypeNames.prefix ++ methodTags.fold(nme.EMPTY)(_ ++ _) ++ nme.specializedTypeNames.separator ++ classTags.fold(nme.EMPTY)(_ ++ _) ++ nme.specializedTypeNames.suffix) - } /** Determines if the current name is the specialized name of the given base name. 
* For example `typeName("Tuple2$mcII$sp").isSpecializedNameOf(tpnme.Tuple2) == true` @@ -327,35 +309,30 @@ object NameOps { /** If name length exceeds allowable limit, replace part of it by hash */ def compactified(using Context): TermName = termName(compactify(name.toString)) - def unmangleClassName: N = name.toTermName match { + def unmangleClassName: N = name.toTermName match case name: SimpleName if name.endsWith(str.MODULE_SUFFIX) && !nme.falseModuleClassNames.contains(name) => likeSpacedN(name.dropRight(str.MODULE_SUFFIX.length).moduleClassName) case _ => name - } - def unmangle(kind: NameKind): N = likeSpacedN { + def unmangle(kind: NameKind): N = likeSpacedN: name match case name: SimpleName => kind.unmangle(name) case name: TypeName => name.toTermName.unmangle(kind).toTypeName case _ => - name replace { + name replace: case unmangled: SimpleName => kind.unmangle(unmangled) case ExpandedName(prefix, last) => - kind.unmangle(last) replace { + kind.unmangle(last) replace: case kernel: SimpleName => ExpandedName(prefix, kernel) - } - } - } - def unmangle(kinds: List[NameKind]): N = { + def unmangle(kinds: List[NameKind]): N = val unmangled = kinds.foldLeft(name)(_.unmangle(_)) if (unmangled eq name) name else unmangled.unmangle(kinds) - } def firstCodePoint: Int = val first = name.firstPart @@ -364,9 +341,8 @@ object NameOps { val codepoint = toCodePoint(first(0), first(1)) if isValidCodePoint(codepoint) then codepoint else first(0) else first(0) - } - extension (name: TermName) { + extension (name: TermName) def setterName: TermName = name.exclude(FieldName) ++ str.SETTER_SUFFIX @@ -392,13 +368,12 @@ object NameOps { if (name.isScala2LocalSuffix) name.asSimpleName.dropRight(1) else name /** The name unary_x for a prefix operator x */ - def toUnaryName: TermName = name match { + def toUnaryName: TermName = name match case raw.MINUS => UNARY_- case raw.PLUS => UNARY_+ case raw.TILDE => UNARY_~ case raw.BANG => UNARY_! 
case _ => name - } /** If this is a super accessor name, its underlying name, which is the name * of the method that the super accessor forwards to. @@ -408,5 +383,3 @@ object NameOps { case ExpandedName(_, name1) => name1.originalOfSuperAccessorName case ExpandPrefixName(_, name1) => name1.originalOfSuperAccessorName case _ => name - } -} diff --git a/compiler/src/dotty/tools/dotc/core/NameTags.scala b/compiler/src/dotty/tools/dotc/core/NameTags.scala index 59dfaa3d437b..891cdff150a5 100644 --- a/compiler/src/dotty/tools/dotc/core/NameTags.scala +++ b/compiler/src/dotty/tools/dotc/core/NameTags.scala @@ -3,7 +3,7 @@ package dotty.tools.dotc.core import dotty.tools.tasty.TastyFormat /** The possible tags of a NameKind */ -object NameTags extends TastyFormat.NameTags { +object NameTags extends TastyFormat.NameTags: inline val FLATTENED = 5 // A flat name, generated by Flatten @@ -40,7 +40,7 @@ object NameTags extends TastyFormat.NameTags { inline val EXPLICITFIELD = 38 // An explicitly named field, introduce to avoid a clash // with a regular field of the underlying name - def nameTagToString(tag: Int): String = tag match { + def nameTagToString(tag: Int): String = tag match case UTF8 => "UTF8" case QUALIFIED => "QUALIFIED" case FLATTENED => "FLATTENED" @@ -64,5 +64,3 @@ object NameTags extends TastyFormat.NameTags { case SIGNED => "SIGNED" case TARGETSIGNED => "TARGETSIGNED" - } -} diff --git a/compiler/src/dotty/tools/dotc/core/NamerOps.scala b/compiler/src/dotty/tools/dotc/core/NamerOps.scala index dc09edd79781..32ad4e9c46a2 100644 --- a/compiler/src/dotty/tools/dotc/core/NamerOps.scala +++ b/compiler/src/dotty/tools/dotc/core/NamerOps.scala @@ -88,8 +88,8 @@ object NamerOps: private def underlyingStableClassRef(tp: Type)(using Context): TypeRef | NoType.type = tp match case EtaExpansion(tp1) => underlyingStableClassRef(tp1) case _ => tp.underlyingClassRef(refinementOK = false) match - case ref: TypeRef if ref.prefix.isStable => ref - case _ => NoType + case ref: 
TypeRef if ref.prefix.isStable => ref + case _ => NoType /** Does symbol `sym` need constructor proxies to be generated? */ def needsConstructorProxies(sym: Symbol)(using Context): Boolean = diff --git a/compiler/src/dotty/tools/dotc/core/Names.scala b/compiler/src/dotty/tools/dotc/core/Names.scala index 1e08379b57f0..a05a92695a77 100644 --- a/compiler/src/dotty/tools/dotc/core/Names.scala +++ b/compiler/src/dotty/tools/dotc/core/Names.scala @@ -12,7 +12,7 @@ import util.{LinearMap, HashSet} import scala.annotation.internal.sharable -object Names { +object Names: import NameKinds._ /** Things that can be turned into names with `toTermName` and `toTypeName`. @@ -30,7 +30,7 @@ object Names { * in a name table. A derived term name adds a tag, and possibly a number * or a further simple name to some other name. */ - abstract class Name extends Designator, Showable derives CanEqual { + abstract class Name extends Designator, Showable derives CanEqual: /** A type for names of the same kind as this name */ type ThisName <: Name @@ -157,10 +157,9 @@ object Names { override def hashCode: Int = System.identityHashCode(this) override def equals(that: Any): Boolean = this eq that.asInstanceOf[AnyRef] - } /** Names for terms, can be simple or derived */ - abstract class TermName extends Name { + abstract class TermName extends Name: type ThisName = TermName override def isTypeName: Boolean = false @@ -175,9 +174,8 @@ object Names { override def toTypeName: TypeName = if myTypeName == null then - synchronized { + synchronized: if myTypeName == null then myTypeName = new TypeName(this) - } myTypeName.nn override def likeSpaced(name: Name): TermName = name.toTermName @@ -188,7 +186,7 @@ object Names { @sharable // because of synchronized block in `and` private var derivedNames: LinearMap[NameInfo, DerivedName] = LinearMap.empty - private def add(info: NameInfo): TermName = synchronized { + private def add(info: NameInfo): TermName = synchronized: val dnOpt = 
derivedNames.lookup(info) dnOpt match case null => @@ -196,21 +194,18 @@ object Names { derivedNames = derivedNames.updated(info, derivedName) derivedName case _ => dnOpt - } private def rewrap(underlying: TermName) = if (underlying eq this.underlying) this else underlying.add(info) - override def derived(info: NameInfo): TermName = { + override def derived(info: NameInfo): TermName = val thisKind = this.info.kind val thatKind = info.kind if (thisKind.tag < thatKind.tag || thatKind.definesNewName) add(info) else if (thisKind.tag > thatKind.tag) rewrap(underlying.derived(info)) - else { + else assert(info == this.info) this - } - } /** Is it impossible that names of kind `kind` also qualify as names of kind `shadowed`? */ private def shadows(kind: NameKind, shadowed: NameKind): Boolean = @@ -218,18 +213,16 @@ object Names { kind.definesQualifiedName || kind.definesNewName && !shadowed.definesQualifiedName - override def exclude(kind: NameKind): TermName = { + override def exclude(kind: NameKind): TermName = val thisKind = this.info.kind if (shadows(thisKind, kind)) this else if (thisKind.tag > kind.tag) rewrap(underlying.exclude(kind)) else underlying - } - override def is(kind: NameKind): Boolean = { + override def is(kind: NameKind): Boolean = val thisKind = this.info.kind thisKind == kind || !shadows(thisKind, kind) && underlying.is(kind) - } @sharable // because it's just a cache for performance private var myMangledString: String | Null = null @@ -239,16 +232,14 @@ object Names { protected[Names] def mangle: ThisName - final def mangled: ThisName = { + final def mangled: ThisName = if (myMangled == null) myMangled = mangle myMangled.asInstanceOf[ThisName] - } - final def mangledString: String = { + final def mangledString: String = if (myMangledString == null) myMangledString = qualToString(_.mangledString, _.mangled.toString) myMangledString.nn - } /** If this a qualified name, split it into underlying, last part, and separator * Otherwise return an empty 
name, the name itself, and "") @@ -259,10 +250,9 @@ object Names { * ` `, the sanitized version of `f1() f2()`. * Otherwise `f2` applied to this name. */ - def qualToString(f1: TermName => String, f2: TermName => String): String = { + def qualToString(f1: TermName => String, f2: TermName => String): String = val (first, last, sep) = split if (first.isEmpty) f2(last) else str.sanitize(f1(first) + sep + f2(last)) - } protected def computeToString: String @@ -272,48 +262,43 @@ object Names { if myToString == null then myToString = computeToString myToString.nn - } /** A simple name is essentially an interned string */ - final class SimpleName(val start: Int, val length: Int) extends TermName { + final class SimpleName(val start: Int, val length: Int) extends TermName: /** The n'th character */ def apply(n: Int): Char = chrs(start + n) /** A character in this name satisfies predicate `p` */ - def exists(p: Char => Boolean): Boolean = { + def exists(p: Char => Boolean): Boolean = var i = 0 while (i < length && !p(chrs(start + i))) i += 1 i < length - } /** All characters in this name satisfy predicate `p` */ def forall(p: Char => Boolean): Boolean = !exists(!p(_)) /** The name contains given character `ch` */ - def contains(ch: Char): Boolean = { + def contains(ch: Char): Boolean = var i = 0 while (i < length && chrs(start + i) != ch) i += 1 i < length - } /** The index of the last occurrence of `ch` in this name which is at most * `start`. 
*/ - def lastIndexOf(ch: Char, start: Int = length - 1): Int = { + def lastIndexOf(ch: Char, start: Int = length - 1): Int = var i = start while (i >= 0 && apply(i) != ch) i -= 1 i - } /** The index of the last occurrence of `str` in this name */ def lastIndexOfSlice(str: String): Int = toString.lastIndexOfSlice(str) /** A slice of this name making up the characters between `from` and `until` (exclusive) */ - def slice(from: Int, end: Int): SimpleName = { + def slice(from: Int, end: Int): SimpleName = assert(0 <= from && from <= end && end <= length) termName(chrs, start + from, end - from) - } def drop(n: Int): SimpleName = slice(n, length) def take(n: Int): SimpleName = slice(0, n) @@ -345,21 +330,19 @@ object Names { override def mapParts(f: SimpleName => SimpleName): SimpleName = f(this) override def split: (TermName, SimpleName, String) = (EmptyTermName, this, "") - override def encode: SimpleName = { + override def encode: SimpleName = val dontEncode = this == StdNames.nme.CONSTRUCTOR || this == StdNames.nme.STATIC_CONSTRUCTOR if (dontEncode) this else NameTransformer.encode(this) - } override def decode: SimpleName = NameTransformer.decode(this) override def isEmpty: Boolean = length == 0 - override def startsWith(str: String, start: Int): Boolean = { + override def startsWith(str: String, start: Int): Boolean = var i = 0 while (i < str.length && start + i < length && apply(start + i) == str(i)) i += 1 i == str.length - } override def endsWith(suffix: String): Boolean = var i = 1 @@ -371,13 +354,12 @@ object Names { while i <= suffix.length && i <= length && apply(length - i) == suffix(suffix.length - i) do i += 1 i > suffix.length - override def replace(from: Char, to: Char): SimpleName = { + override def replace(from: Char, to: Char): SimpleName = val cs = new Array[Char](length) System.arraycopy(chrs, start, cs, 0, length) for (i <- 0 until length) if (cs(i) == from) cs(i) = to termName(cs, 0, length) - } override def firstPart: SimpleName = this override 
def lastPart: SimpleName = this @@ -386,9 +368,9 @@ object Names { protected def computeToString: String = if (length == 0) "" - else { + else if (Config.checkBackendNames) - if (!toStringOK) { + if (!toStringOK) // We print the stacktrace instead of doing an assert directly, // because asserts are caught in exception handlers which might // cause other failures. In that case the first, important failure @@ -396,14 +378,12 @@ object Names { System.err.nn.println("Backend should not call Name#toString, Name#mangledString should be used instead.") Thread.dumpStack() assert(false) - } new String(chrs, start, length) - } /** It's OK to take a toString if the stacktrace does not contain a method * from GenBCode or it also contains one of the whitelisted methods below. */ - private def toStringOK = { + private def toStringOK = val trace: Array[StackTraceElement] = Thread.currentThread.nn.getStackTrace.asInstanceOf[Array[StackTraceElement]] !trace.exists(_.getClassName.nn.endsWith("GenBCode")) || trace.exists(elem => @@ -418,12 +398,10 @@ object Names { "readConstant", "extractedName") .contains(elem.getMethodName)) - } def debugString: String = toString - } - final class TypeName(val toTermName: TermName) extends Name { + final class TypeName(val toTermName: TermName) extends Name: type ThisName = TypeName @@ -458,13 +436,12 @@ object Names { override def toString: String = toTermName.toString override def debugString: String = toTermName.debugString + "/T" - } /** A term name that's derived from an `underlying` name and that * adds `info` to it. 
*/ final case class DerivedName(override val underlying: TermName, override val info: NameInfo) - extends TermName { + extends TermName: override def asSimpleName: Nothing = throw new UnsupportedOperationException(s"$debugString is not a simple name") @@ -473,49 +450,42 @@ object Names { override def replace(f: PartialFunction[Name, Name]): ThisName = if (f.isDefinedAt(this)) likeSpaced(f(this)) - else info match { + else info match case qual: QualifiedInfo => this case _ => underlying.replace(f).derived(info) - } override def collect[T](f: PartialFunction[Name, T]): Option[T] = if (f.isDefinedAt(this)) Some(f(this)) - else info match { + else info match case qual: QualifiedInfo => None case _ => underlying.collect(f) - } override def mapLast(f: SimpleName => SimpleName): ThisName = - info match { + info match case qual: QualifiedInfo => underlying.derived(qual.map(f)) case _ => underlying.mapLast(f).derived(info) - } override def mapParts(f: SimpleName => SimpleName): ThisName = - info match { + info match case qual: QualifiedInfo => underlying.mapParts(f).derived(qual.map(f)) case _ => underlying.mapParts(f).derived(info) - } - override def split: (TermName, TermName, String) = info match { + override def split: (TermName, TermName, String) = info match case info: QualifiedInfo => (underlying, info.name, info.kind.asInstanceOf[QualifiedNameKind].separator) case _ => val (prefix, suffix, separator) = underlying.split (prefix, suffix.derived(info), separator) - } override def isEmpty: Boolean = false override def encode: ThisName = underlying.encode.derived(info.map(_.encode)) override def decode: ThisName = underlying.decode.derived(info.map(_.decode)) override def firstPart: SimpleName = underlying.firstPart - override def lastPart: SimpleName = info match { + override def lastPart: SimpleName = info match case qual: QualifiedInfo => qual.name case _ => underlying.lastPart - } protected def computeToString: String = info.mkString(underlying) override def 
debugString: String = s"${underlying.debugString}[$info]" - } /** The term name represented by the empty string */ val EmptyTermName: SimpleName = SimpleName(-1, 0) @@ -557,7 +527,7 @@ object Names { idx = (idx + 1) & (myTable.length - 1) name = myTable(idx).asInstanceOf[SimpleName | Null] Stats.record(statsItem("addEntryAt")) - synchronized { + synchronized: if (myTable eq currentTable) && myTable(idx) == null then // Our previous unsynchronized computation of the next free index is still correct. // This relies on the fact that table entries go from null to non-null, and then @@ -573,7 +543,6 @@ object Names { addEntryAt(idx, name.nn) else enterIfNew(cs, offset, len) - } addEntryAt(0, EmptyTermName: @unchecked) end NameTable @@ -583,25 +552,22 @@ object Names { private val nameTable = NameTable() /** The hash of a name made of from characters cs[offset..offset+len-1]. */ - private def hashValue(cs: Array[Char], offset: Int, len: Int): Int = { + private def hashValue(cs: Array[Char], offset: Int, len: Int): Int = var i = offset var hash = 0 - while (i < len + offset) { + while (i < len + offset) hash = 31 * hash + cs(i) i += 1 - } hash - } /** Is (the ASCII representation of) name at given index equal to * cs[offset..offset+len-1]? */ - private def equals(index: Int, cs: Array[Char], offset: Int, len: Int): Boolean = { + private def equals(index: Int, cs: Array[Char], offset: Int, len: Int): Boolean = var i = 0 while ((i < len) && (chrs(index + i) == cs(offset + i))) i += 1 i == len - } /** Create a term name from the characters in cs[offset..offset+len-1]. * Assume they are already encoded. @@ -618,10 +584,9 @@ object Names { /** Create a term name from the UTF8 encoded bytes in bs[offset..offset+len-1]. * Assume they are already encoded. 
*/ - def termName(bs: Array[Byte], offset: Int, len: Int): SimpleName = { + def termName(bs: Array[Byte], offset: Int, len: Int): SimpleName = val chars = Codec.fromUTF8(bs, offset, len) termName(chars, 0, chars.length) - } /** Create a type name from the UTF8 encoded bytes in bs[offset..offset+len-1]. * Assume they are already encoded. @@ -641,25 +606,22 @@ object Names { /** The type name represented by the empty string */ val EmptyTypeName: TypeName = EmptyTermName.toTypeName - implicit val NameOrdering: Ordering[Name] = new Ordering[Name] { + implicit val NameOrdering: Ordering[Name] = new Ordering[Name]: private def compareInfos(x: NameInfo, y: NameInfo): Int = if (x.kind.tag != y.kind.tag) x.kind.tag - y.kind.tag - else x match { + else x match case x: QualifiedInfo => - y match { + y match case y: QualifiedInfo => compareSimpleNames(x.name, y.name) - } case x: NumberedInfo => - y match { + y match case y: NumberedInfo => x.num - y.num - } case _ => assert(x == y) 0 - } - private def compareSimpleNames(x: SimpleName, y: SimpleName): Int = { + private def compareSimpleNames(x: SimpleName, y: SimpleName): Int = val until = x.length min y.length var i = 0 while (i < until && x(i) == y(i)) i = i + 1 @@ -668,25 +630,19 @@ object Names { else /*(x(i) > y(i))*/ 1 else x.length - y.length - } - private def compareTermNames(x: TermName, y: TermName): Int = x match { + private def compareTermNames(x: TermName, y: TermName): Int = x match case x: SimpleName => - y match { + y match case y: SimpleName => compareSimpleNames(x, y) case _ => -1 - } case DerivedName(xPre, xInfo) => - y match { + y match case DerivedName(yPre, yInfo) => val s = compareInfos(xInfo, yInfo) if (s == 0) compareTermNames(xPre, yPre) else s case _ => 1 - } - } def compare(x: Name, y: Name): Int = if (x.isTermName && y.isTypeName) 1 else if (x.isTypeName && y.isTermName) -1 else if (x eq y) 0 else compareTermNames(x.toTermName, y.toTermName) - } -} diff --git 
a/compiler/src/dotty/tools/dotc/core/NullOpsDecorator.scala b/compiler/src/dotty/tools/dotc/core/NullOpsDecorator.scala index e18271772ff1..7afe0b441588 100644 --- a/compiler/src/dotty/tools/dotc/core/NullOpsDecorator.scala +++ b/compiler/src/dotty/tools/dotc/core/NullOpsDecorator.scala @@ -14,10 +14,10 @@ object NullOpsDecorator: * If this type isn't (syntactically) nullable, then returns the type unchanged. * The type will not be changed if explicit-nulls is not enabled. */ - def stripNull(using Context): Type = { + def stripNull(using Context): Type = def strip(tp: Type): Type = val tpWiden = tp.widenDealias - val tpStripped = tpWiden match { + val tpStripped = tpWiden match case tp @ OrType(lhs, rhs) => val llhs = strip(lhs) val rrhs = strip(rhs) @@ -36,25 +36,21 @@ object NullOpsDecorator: case tp @ TypeBounds(lo, hi) => tp.derivedTypeBounds(strip(lo), strip(hi)) case tp => tp - } if tpStripped ne tpWiden then tpStripped else tp if ctx.explicitNulls then strip(self) else self - } /** Is self (after widening and dealiasing) a type of the form `T | Null`? 
*/ - def isNullableUnion(using Context): Boolean = { + def isNullableUnion(using Context): Boolean = val stripped = self.stripNull stripped ne self - } end extension import ast.tpd._ extension (self: Tree) // cast the type of the tree to a non-nullable type - def castToNonNullable(using Context): Tree = self.typeOpt match { + def castToNonNullable(using Context): Tree = self.typeOpt match case OrNull(tp) => self.cast(tp) case _ => self - } end NullOpsDecorator diff --git a/compiler/src/dotty/tools/dotc/core/OrderingConstraint.scala b/compiler/src/dotty/tools/dotc/core/OrderingConstraint.scala index faea30390d2b..bf38231682bc 100644 --- a/compiler/src/dotty/tools/dotc/core/OrderingConstraint.scala +++ b/compiler/src/dotty/tools/dotc/core/OrderingConstraint.scala @@ -14,7 +14,7 @@ import annotation.tailrec import annotation.internal.sharable import cc.{CapturingType, derivedCapturingType} -object OrderingConstraint { +object OrderingConstraint: /** If true, use reverse dependencies in `replace` to avoid checking the bounds * of all parameters in the constraint. This can speed things up, but there are some @@ -43,15 +43,14 @@ object OrderingConstraint { private type ParamOrdering = ArrayValuedMap[List[TypeParamRef]] /** A lens for updating a single entry array in one of the three constraint maps */ - private abstract class ConstraintLens[T <: AnyRef: ClassTag] { + private abstract class ConstraintLens[T <: AnyRef: ClassTag]: def entries(c: OrderingConstraint, poly: TypeLambda): Array[T] | Null def updateEntries(c: OrderingConstraint, poly: TypeLambda, entries: Array[T])(using Context): OrderingConstraint def initial: T - def apply(c: OrderingConstraint, poly: TypeLambda, idx: Int): T = { + def apply(c: OrderingConstraint, poly: TypeLambda, idx: Int): T = val es = entries(c, poly) if (es == null) initial else es(idx) - } /** The `current` constraint but with the entry for `param` updated to `entry`. * `current` is used linearly. 
If it is different from `prev` then `current` is @@ -59,29 +58,24 @@ object OrderingConstraint { * parts of `current` which are not shared by `prev`. */ def update(prev: OrderingConstraint, current: OrderingConstraint, - poly: TypeLambda, idx: Int, entry: T)(using Context): OrderingConstraint = { + poly: TypeLambda, idx: Int, entry: T)(using Context): OrderingConstraint = var es = entries(current, poly) // TODO: investigate why flow typing is not working on `es` if (es != null && (es.nn(idx) eq entry)) current - else { + else val result = - if (es == null) { + if (es == null) es = Array.fill(poly.paramNames.length)(initial) updateEntries(current, poly, es.nn) - } - else { + else val prev_es = entries(prev, poly) if (prev_es == null || (es.nn ne prev_es.nn)) current // can re-use existing entries array. - else { + else es = es.nn.clone updateEntries(current, poly, es.nn) - } - } es.nn(idx) = entry result - } - } def update(prev: OrderingConstraint, current: OrderingConstraint, param: TypeParamRef, entry: T)(using Context): OrderingConstraint = @@ -94,35 +88,30 @@ object OrderingConstraint { def map(prev: OrderingConstraint, current: OrderingConstraint, param: TypeParamRef, f: T => T)(using Context): OrderingConstraint = map(prev, current, param.binder, param.paramNum, f) - } - private val boundsLens: ConstraintLens[Type] = new ConstraintLens[Type] { + private val boundsLens: ConstraintLens[Type] = new ConstraintLens[Type]: def entries(c: OrderingConstraint, poly: TypeLambda): Array[Type] | Null = c.boundsMap(poly) def updateEntries(c: OrderingConstraint, poly: TypeLambda, entries: Array[Type])(using Context): OrderingConstraint = c.newConstraint(boundsMap = c.boundsMap.updated(poly, entries)) def initial = NoType - } - private val lowerLens: ConstraintLens[List[TypeParamRef]] = new ConstraintLens[List[TypeParamRef]] { + private val lowerLens: ConstraintLens[List[TypeParamRef]] = new ConstraintLens[List[TypeParamRef]]: def entries(c: OrderingConstraint, poly: 
TypeLambda): Array[List[TypeParamRef]] | Null = c.lowerMap(poly) def updateEntries(c: OrderingConstraint, poly: TypeLambda, entries: Array[List[TypeParamRef]])(using Context): OrderingConstraint = c.newConstraint(lowerMap = c.lowerMap.updated(poly, entries)) def initial = Nil - } - private val upperLens: ConstraintLens[List[TypeParamRef]] = new ConstraintLens[List[TypeParamRef]] { + private val upperLens: ConstraintLens[List[TypeParamRef]] = new ConstraintLens[List[TypeParamRef]]: def entries(c: OrderingConstraint, poly: TypeLambda): Array[List[TypeParamRef]] | Null = c.upperMap(poly) def updateEntries(c: OrderingConstraint, poly: TypeLambda, entries: Array[List[TypeParamRef]])(using Context): OrderingConstraint = c.newConstraint(upperMap = c.upperMap.updated(poly, entries)) def initial = Nil - } @sharable val empty = new OrderingConstraint(SimpleIdentityMap.empty, SimpleIdentityMap.empty, SimpleIdentityMap.empty, SimpleIdentitySet.empty) -} import OrderingConstraint._ @@ -149,7 +138,7 @@ import OrderingConstraint._ class OrderingConstraint(private val boundsMap: ParamBounds, private val lowerMap : ParamOrdering, private val upperMap : ParamOrdering, - private val hardVars : TypeVars) extends Constraint { + private val hardVars : TypeVars) extends Constraint: thisConstraint => import UnificationDirection.* @@ -181,42 +170,37 @@ class OrderingConstraint(private val boundsMap: ParamBounds, entries(paramCount(entries) + n) /** The `boundsMap` entry corresponding to `param` */ - def entry(param: TypeParamRef): Type = { + def entry(param: TypeParamRef): Type = val entries = boundsMap(param.binder) if (entries == null) NoType else entries(param.paramNum) - } // ----------- Contains tests -------------------------------------------------- def contains(pt: TypeLambda): Boolean = boundsMap(pt) != null - def contains(param: TypeParamRef): Boolean = { + def contains(param: TypeParamRef): Boolean = val entries = boundsMap(param.binder) entries != null && 
isBounds(entries(param.paramNum)) - } - def contains(tvar: TypeVar): Boolean = { + def contains(tvar: TypeVar): Boolean = val origin = tvar.origin val entries = boundsMap(origin.binder) val pnum = origin.paramNum entries != null && isBounds(entries(pnum)) && (typeVar(entries, pnum) eq tvar) - } // ---------- Dependency handling ---------------------------------------------- def lower(param: TypeParamRef): List[TypeParamRef] = lowerLens(this, param.binder, param.paramNum) def upper(param: TypeParamRef): List[TypeParamRef] = upperLens(this, param.binder, param.paramNum) - def minLower(param: TypeParamRef): List[TypeParamRef] = { + def minLower(param: TypeParamRef): List[TypeParamRef] = val all = lower(param) all.filterNot(p => all.exists(isLess(p, _))) - } - def minUpper(param: TypeParamRef): List[TypeParamRef] = { + def minUpper(param: TypeParamRef): List[TypeParamRef] = val all = upper(param) all.filterNot(p => all.exists(isLess(_, p))) - } def exclusiveLower(param: TypeParamRef, butNot: TypeParamRef): List[TypeParamRef] = lower(param).filterNot(isLess(_, butNot)) @@ -224,16 +208,14 @@ class OrderingConstraint(private val boundsMap: ParamBounds, def exclusiveUpper(param: TypeParamRef, butNot: TypeParamRef): List[TypeParamRef] = upper(param).filterNot(isLess(butNot, _)) - def bounds(param: TypeParamRef)(using Context): TypeBounds = { + def bounds(param: TypeParamRef)(using Context): TypeBounds = val e = entry(param) if (e.exists) e.bounds - else { + else // TODO: should we change the type of paramInfos to nullable? 
val pinfos: List[param.binder.PInfo] | Null = param.binder.paramInfos if (pinfos != null) pinfos(param.paramNum) // pinfos == null happens in pos/i536.scala else TypeBounds.empty - } - } // ---------- Info related to TypeParamRefs ------------------------------------------- @@ -386,10 +368,9 @@ class OrderingConstraint(private val boundsMap: ParamBounds, baseCase else bound match case bound: AndOrType => - adjustDelta(bound.tp1, prevBound, isLower, baseCase) && { + adjustDelta(bound.tp1, prevBound, isLower, baseCase) `&&`: adjustReferenced(bound.tp2, isLower, add = true) true - } case _ => false /** Add or remove depenencies referenced in `bounds`. @@ -475,7 +456,7 @@ class OrderingConstraint(private val boundsMap: ParamBounds, private def stripParams( tp: Type, todos: mutable.ListBuffer[(OrderingConstraint, TypeParamRef) => OrderingConstraint], - isUpper: Boolean)(using Context): Type = tp match { + isUpper: Boolean)(using Context): Type = tp match case param: TypeParamRef if contains(param) => todos += (if isUpper then order(_, _, param) else order(_, param, _)) NoType @@ -499,9 +480,8 @@ class OrderingConstraint(private val boundsMap: ParamBounds, else tp2 case _ => tp - } - def add(poly: TypeLambda, tvars: List[TypeVar])(using Context): This = { + def add(poly: TypeLambda, tvars: List[TypeVar])(using Context): This = assert(!contains(poly)) val nparams = poly.paramNames.length val entries1 = new Array[Type](nparams * 2) @@ -509,18 +489,17 @@ class OrderingConstraint(private val boundsMap: ParamBounds, tvars.copyToArray(entries1, nparams) newConstraint(boundsMap = this.boundsMap.updated(poly, entries1)) .init(poly) - } /** Split dependent parameters off the bounds for parameters in `poly`. * Update all bounds to be normalized and update ordering to account for * dependent parameters. 
*/ - private def init(poly: TypeLambda)(using Context): This = { + private def init(poly: TypeLambda)(using Context): This = var current = this val todos = new mutable.ListBuffer[(OrderingConstraint, TypeParamRef) => OrderingConstraint] var i = 0 val dropWildcards = AvoidWildcardsMap() - while (i < poly.paramNames.length) { + while (i < poly.paramNames.length) val param = poly.paramRefs(i) val bounds = dropWildcards(nonParamBounds(param)) val stripped = stripParams(bounds, todos, isUpper = true) @@ -529,10 +508,8 @@ class OrderingConstraint(private val boundsMap: ParamBounds, current = todos.head(current, param) todos.dropInPlace(1) i += 1 - } current.adjustDeps(poly, current.boundsMap(poly).nn, add = true) .checkWellFormed() - } // ---------- Updates ------------------------------------------------------------ @@ -592,7 +569,7 @@ class OrderingConstraint(private val boundsMap: ParamBounds, assert(current.contains(param1), i"$param1") assert(current.contains(param2), i"$param2") val unifying = direction != NoUnification - val newUpper = { + val newUpper = val up = current.exclusiveUpper(param2, param1) if unifying then // Since param2 <:< param1 already holds now, filter out param1 to avoid adding @@ -605,8 +582,7 @@ class OrderingConstraint(private val boundsMap: ParamBounds, filtered else param2 :: up - } - val newLower = { + val newLower = val lower = current.exclusiveLower(param1, param2) if unifying then // Similarly, filter out param2 from lowerly-ordered parameters @@ -619,7 +595,6 @@ class OrderingConstraint(private val boundsMap: ParamBounds, filtered else param1 :: lower - } val current1 = newLower.foldLeft(current)(upperLens.map(this, _, _, newUpper ::: _)) val current2 = newUpper.foldLeft(current1)(lowerLens.map(this, _, _, newLower ::: _)) current2 @@ -643,21 +618,19 @@ class OrderingConstraint(private val boundsMap: ParamBounds, case _ => Nil - private def updateEntry(current: This, param: TypeParamRef, newEntry: Type)(using Context): This = { + 
private def updateEntry(current: This, param: TypeParamRef, newEntry: Type)(using Context): This = if Config.checkNoWildcardsInConstraint then assert(!newEntry.containsWildcardTypes) val oldEntry = current.entry(param) var current1 = boundsLens.update(this, current, param, newEntry) .adjustDeps(newEntry, oldEntry, param) - newEntry match { + newEntry match case TypeBounds(lo, hi) => for p <- dependentParams(lo, isUpper = false) do current1 = order(current1, p, param) for p <- dependentParams(hi, isUpper = true) do current1 = order(current1, param, p) case _ => - } current1 - } def updateEntry(param: TypeParamRef, tp: Type)(using Context): This = updateEntry(this, param, tp).checkWellFormed() @@ -749,30 +722,25 @@ class OrderingConstraint(private val boundsMap: ParamBounds, current.checkWellFormed() end replace - def remove(pt: TypeLambda)(using Context): This = { - def removeFromOrdering(po: ParamOrdering) = { - def removeFromBoundss(key: TypeLambda, bndss: Array[List[TypeParamRef]]): Array[List[TypeParamRef]] = { + def remove(pt: TypeLambda)(using Context): This = + def removeFromOrdering(po: ParamOrdering) = + def removeFromBoundss(key: TypeLambda, bndss: Array[List[TypeParamRef]]): Array[List[TypeParamRef]] = val bndss1 = bndss.map(_.filterConserve(_.binder ne pt)) if (bndss.corresponds(bndss1)(_ eq _)) bndss else bndss1 - } po.remove(pt).mapValuesNow(removeFromBoundss) - } val hardVars1 = pt.paramRefs.foldLeft(hardVars)((hvs, param) => hvs - typeVarOfParam(param)) newConstraint(boundsMap.remove(pt), removeFromOrdering(lowerMap), removeFromOrdering(upperMap), hardVars1) .adjustDeps(pt, boundsMap(pt).nn, add = false) .checkWellFormed() - } - def isRemovable(pt: TypeLambda): Boolean = { + def isRemovable(pt: TypeLambda): Boolean = val entries = boundsMap(pt).nn @tailrec def allRemovable(last: Int): Boolean = if (last < 0) true - else typeVar(entries, last) match { + else typeVar(entries, last) match case tv: TypeVar => tv.inst.exists && allRemovable(last - 1) case 
_ => false - } allRemovable(paramCount(entries) - 1) - } // ----------- Joins ----------------------------------------------------- @@ -807,16 +775,14 @@ class OrderingConstraint(private val boundsMap: ParamBounds, case tp => tp def ensureFresh(tl: TypeLambda)(using Context): TypeLambda = - if (contains(tl)) { + if (contains(tl)) var paramInfos = tl.paramInfos - if (tl.isInstanceOf[HKLambda]) { + if (tl.isInstanceOf[HKLambda]) // HKLambdas are hash-consed, need to create an artificial difference by adding // a LazyRef to a bound. val TypeBounds(lo, hi) :: pinfos1 = tl.paramInfos: @unchecked paramInfos = TypeBounds(lo, LazyRef.of(hi)) :: pinfos1 - } ensureFresh(tl.newLikeThis(tl.paramNames, paramInfos, tl.resultType)) - } else tl def checkConsistentVars()(using Context): Unit = @@ -882,19 +848,16 @@ class OrderingConstraint(private val boundsMap: ParamBounds, private var myUninstVars: mutable.ArrayBuffer[TypeVar] | Null = _ /** The uninstantiated typevars of this constraint */ - def uninstVars: collection.Seq[TypeVar] = { - if (myUninstVars == null || myUninstVars.uncheckedNN.exists(_.inst.exists)) { + def uninstVars: collection.Seq[TypeVar] = + if (myUninstVars == null || myUninstVars.uncheckedNN.exists(_.inst.exists)) myUninstVars = new mutable.ArrayBuffer[TypeVar] boundsMap.foreachBinding { (poly, entries) => for (i <- 0 until paramCount(entries)) - typeVar(entries, i) match { + typeVar(entries, i) match case tv: TypeVar if !tv.inst.exists && isBounds(entries(i)) => myUninstVars.uncheckedNN += tv case _ => - } } - } myUninstVars.uncheckedNN - } // ---------- Checking ----------------------------------------------- @@ -985,24 +948,20 @@ class OrderingConstraint(private val boundsMap: ParamBounds, override def toText(printer: Printer): Text = printer.toText(this) - override def toString: String = { - def entryText(tp: Type): String = tp match { + override def toString: String = + def entryText(tp: Type): String = tp match case tp: TypeBounds => tp.toString case _ 
=> " := " + tp - } val constrainedText = " constrained types = " + domainLambdas.mkString("\n") val boundsText = - "\n bounds = " + { + "\n bounds = " `+`: val assocs = for (param <- domainParams) yield s"${param.binder.paramNames(param.paramNum)}: ${entryText(entry(param))}" assocs.mkString("\n") - } val depsText = "\n coDeps = " + coDeps + "\n contraDeps = " + contraDeps constrainedText + boundsText + depsText - } -} diff --git a/compiler/src/dotty/tools/dotc/core/ParamInfo.scala b/compiler/src/dotty/tools/dotc/core/ParamInfo.scala index e88d6540e64b..1843d1c6c739 100644 --- a/compiler/src/dotty/tools/dotc/core/ParamInfo.scala +++ b/compiler/src/dotty/tools/dotc/core/ParamInfo.scala @@ -8,7 +8,7 @@ import Variances.{Variance, varianceToInt} /** A common super trait of Symbol and LambdaParam. * Used to capture the attributes of type parameters which can be implemented as either. */ -trait ParamInfo { +trait ParamInfo: type ThisName <: Name @@ -46,8 +46,6 @@ trait ParamInfo { /** A type that refers to the parameter */ def paramRef(using Context): Type -} -object ParamInfo { +object ParamInfo: type Of[N] = ParamInfo { type ThisName = N } -} \ No newline at end of file diff --git a/compiler/src/dotty/tools/dotc/core/PatternTypeConstrainer.scala b/compiler/src/dotty/tools/dotc/core/PatternTypeConstrainer.scala index 5e8a960608e6..4845e130518e 100644 --- a/compiler/src/dotty/tools/dotc/core/PatternTypeConstrainer.scala +++ b/compiler/src/dotty/tools/dotc/core/PatternTypeConstrainer.scala @@ -73,48 +73,41 @@ trait PatternTypeConstrainer { self: TypeComparer => * scrutinee and pattern types. This does not apply if the pattern type is only applied to type variables, * in which case the subtyping relationship "heals" the type. 
*/ - def constrainPatternType(pat: Type, scrut: Type, forceInvariantRefinement: Boolean = false): Boolean = trace(i"constrainPatternType($scrut, $pat)", gadts) { + def constrainPatternType(pat: Type, scrut: Type, forceInvariantRefinement: Boolean = false): Boolean = trace(i"constrainPatternType($scrut, $pat)", gadts): - def classesMayBeCompatible: Boolean = { + def classesMayBeCompatible: Boolean = import Flags._ val patCls = pat.classSymbol val scrCls = scrut.classSymbol - !patCls.exists || !scrCls.exists || { + !patCls.exists || !scrCls.exists `||`: if (patCls.is(Final)) patCls.derivesFrom(scrCls) else if (scrCls.is(Final)) scrCls.derivesFrom(patCls) else if (!patCls.is(Flags.Trait) && !scrCls.is(Flags.Trait)) patCls.derivesFrom(scrCls) || scrCls.derivesFrom(patCls) else true - } - } - def stripRefinement(tp: Type): Type = tp match { + def stripRefinement(tp: Type): Type = tp match case tp: RefinedOrRecType => stripRefinement(tp.parent) case tp => tp - } - def tryConstrainSimplePatternType(pat: Type, scrut: Type) = { + def tryConstrainSimplePatternType(pat: Type, scrut: Type) = val patCls = pat.classSymbol val scrCls = scrut.classSymbol patCls.exists && scrCls.exists && (patCls.derivesFrom(scrCls) || scrCls.derivesFrom(patCls)) && constrainSimplePatternType(pat, scrut, forceInvariantRefinement) - } - def constrainUpcasted(scrut: Type): Boolean = trace(i"constrainUpcasted($scrut)", gadts) { + def constrainUpcasted(scrut: Type): Boolean = trace(i"constrainUpcasted($scrut)", gadts): // Fold a list of types into an AndType - def buildAndType(xs: List[Type]): Type = { - @annotation.tailrec def recur(acc: Type, rem: List[Type]): Type = rem match { + def buildAndType(xs: List[Type]): Type = + @annotation.tailrec def recur(acc: Type, rem: List[Type]): Type = rem match case Nil => acc case x :: rem => recur(AndType(acc, x), rem) - } - xs match { + xs match case Nil => NoType case x :: xs => recur(x, xs) - } - } - scrut match { + scrut match case scrut: TypeRef if 
scrut.symbol.isClass => // consider all parents val parents = scrut.parents @@ -123,7 +116,7 @@ trait PatternTypeConstrainer { self: TypeComparer => case scrut @ AppliedType(tycon: TypeRef, _) if tycon.symbol.isClass => val patCls = pat.classSymbol // find all shared parents in the inheritance hierarchy between pat and scrut - def allParentsSharedWithPat(tp: Type, tpClassSym: ClassSymbol): List[Symbol] = { + def allParentsSharedWithPat(tp: Type, tpClassSym: ClassSymbol): List[Symbol] = var parents = tpClassSym.info.parents if parents.nonEmpty && parents.head.classSymbol == defn.ObjectClass then parents = parents.tail @@ -132,38 +125,32 @@ trait PatternTypeConstrainer { self: TypeComparer => if patCls.derivesFrom(sym) then List(sym) else allParentsSharedWithPat(tp, sym) } - } val allSyms = allParentsSharedWithPat(tycon, tycon.symbol.asClass) val baseClasses = allSyms map scrut.baseType val andType = buildAndType(baseClasses) !andType.exists || constrainPatternType(pat, andType) case _ => - def tryGadtBounds = scrut match { + def tryGadtBounds = scrut match case scrut: TypeRef => - ctx.gadt.bounds(scrut.symbol) match { + ctx.gadt.bounds(scrut.symbol) match case tb: TypeBounds => val hi = tb.hi constrainPatternType(pat, hi) case null => true - } case _ => true - } def trySuperType = - val upcasted: Type = scrut match { + val upcasted: Type = scrut match case scrut: TypeProxy => scrut.superType case _ => NoType - } if (upcasted.exists) tryConstrainSimplePatternType(pat, upcasted) || constrainUpcasted(upcasted) else true tryGadtBounds && trySuperType - } - } - def dealiasDropNonmoduleRefs(tp: Type) = tp.dealias match { + def dealiasDropNonmoduleRefs(tp: Type) = tp.dealias match case tp: TermRef => // we drop TermRefs that don't have a class symbol, as they can't // meaningfully participate in GADT reasoning and just get in the way. @@ -173,28 +160,24 @@ trait PatternTypeConstrainer { self: TypeComparer => // See run/enum-Tree.scala. 
if tp.classSymbol.exists then tp else tp.info case tp => tp - } - dealiasDropNonmoduleRefs(scrut) match { + dealiasDropNonmoduleRefs(scrut) match case OrType(scrut1, scrut2) => either(constrainPatternType(pat, scrut1), constrainPatternType(pat, scrut2)) case AndType(scrut1, scrut2) => constrainPatternType(pat, scrut1) && constrainPatternType(pat, scrut2) case scrut: RefinedOrRecType => constrainPatternType(pat, stripRefinement(scrut)) - case scrut => dealiasDropNonmoduleRefs(pat) match { - case OrType(pat1, pat2) => - either(constrainPatternType(pat1, scrut), constrainPatternType(pat2, scrut)) - case AndType(pat1, pat2) => - constrainPatternType(pat1, scrut) && constrainPatternType(pat2, scrut) - case pat: RefinedOrRecType => - constrainPatternType(stripRefinement(pat), scrut) - case pat => - tryConstrainSimplePatternType(pat, scrut) - || classesMayBeCompatible && constrainUpcasted(scrut) - } - } - } + case scrut => dealiasDropNonmoduleRefs(pat) match + case OrType(pat1, pat2) => + either(constrainPatternType(pat1, scrut), constrainPatternType(pat2, scrut)) + case AndType(pat1, pat2) => + constrainPatternType(pat1, scrut) && constrainPatternType(pat2, scrut) + case pat: RefinedOrRecType => + constrainPatternType(stripRefinement(pat), scrut) + case pat => + tryConstrainSimplePatternType(pat, scrut) + || classesMayBeCompatible && constrainUpcasted(scrut) /** Constrain "simple" patterns (see `constrainPatternType`). * @@ -230,15 +213,14 @@ trait PatternTypeConstrainer { self: TypeComparer => * case classes without also appropriately extending the relevant case class * (see `RefChecks#checkCaseClassInheritanceInvariant`). 
*/ - def constrainSimplePatternType(patternTp: Type, scrutineeTp: Type, forceInvariantRefinement: Boolean): Boolean = { - def refinementIsInvariant(tp: Type): Boolean = tp match { + def constrainSimplePatternType(patternTp: Type, scrutineeTp: Type, forceInvariantRefinement: Boolean): Boolean = + def refinementIsInvariant(tp: Type): Boolean = tp match case tp: SingletonType => true case tp: ClassInfo => tp.cls.is(Final) || tp.cls.is(Case) case tp: TypeProxy => refinementIsInvariant(tp.superType) case _ => false - } - def widenVariantParams(tp: Type) = tp match { + def widenVariantParams(tp: Type) = tp match case tp @ AppliedType(tycon, args) => val args1 = args.zipWithConserve(tycon.typeParams)((arg, tparam) => if (tparam.paramVarianceSign != 0) TypeBounds.empty else arg @@ -246,7 +228,6 @@ trait PatternTypeConstrainer { self: TypeComparer => tp.derivedAppliedType(tycon, args1) case tp => tp - } val patternCls = patternTp.classSymbol val scrutineeCls = scrutineeTp.classSymbol @@ -261,12 +242,12 @@ trait PatternTypeConstrainer { self: TypeComparer => val assumeInvariantRefinement = migrateTo3 || forceInvariantRefinement || refinementIsInvariant(patternTp) - trace(i"constraining simple pattern type $tp >:< $pt", gadts, (res: Boolean) => i"$res gadt = ${ctx.gadt}") { - (tp, pt) match { + trace(i"constraining simple pattern type $tp >:< $pt", gadts, (res: Boolean) => i"$res gadt = ${ctx.gadt}"): + (tp, pt) match case (AppliedType(tyconS, argsS), AppliedType(tyconP, argsP)) => val saved = state.nn.constraint val result = - ctx.gadtState.rollbackGadtUnless { + ctx.gadtState.rollbackGadtUnless: tyconS.typeParams.lazyZip(argsS).lazyZip(argsP).forall { (param, argS, argP) => val variance = param.paramVarianceSign if variance == 0 || assumeInvariantRefinement || @@ -282,7 +263,6 @@ trait PatternTypeConstrainer { self: TypeComparer => res else true } - } if !result then constraint = saved result @@ -292,7 +272,4 @@ trait PatternTypeConstrainer { self: TypeComparer => // be 
co-inhabited, just that we cannot extract information out of them directly // and should upcast. false - } - } - } } diff --git a/compiler/src/dotty/tools/dotc/core/Periods.scala b/compiler/src/dotty/tools/dotc/core/Periods.scala index ee877fb538d4..d6f4705dfcc9 100644 --- a/compiler/src/dotty/tools/dotc/core/Periods.scala +++ b/compiler/src/dotty/tools/dotc/core/Periods.scala @@ -3,7 +3,7 @@ package dotty.tools.dotc.core import Contexts._ import Phases.unfusedPhases -object Periods { +object Periods: /** The period containing the current period where denotations do not change. * We compute this by taking as first phase the first phase less or equal to @@ -35,7 +35,7 @@ object Periods { * * // Dmitry: sign == 0 isn't actually always true, in some cases phaseId == -1 is used for shifts, that easily creates code < 0 */ - class Period(val code: Int) extends AnyVal { + class Period(val code: Int) extends AnyVal: /** The run identifier of this period. */ def runId: RunId = code >>> (PhaseWidth * 2) @@ -53,7 +53,7 @@ object Periods { def containsPhaseId(id: PhaseId): Boolean = firstPhaseId <= id && id <= lastPhaseId /** Does this period contain given period? */ - def contains(that: Period): Boolean = { + def contains(that: Period): Boolean = // Let this = (r1, l1, d1), that = (r2, l2, d2) // where r = runid, l = last phase, d = duration - 1 // Then seen as intervals: @@ -73,7 +73,6 @@ object Periods { // q.e.d val lastDiff = (code - that.code) >>> PhaseWidth lastDiff + (that.code & PhaseMask ) <= (this.code & PhaseMask) - } /** Does this period overlap with given period? 
*/ def overlaps(that: Period): Boolean = @@ -101,9 +100,8 @@ object Periods { def ==(that: Period): Boolean = this.code == that.code def !=(that: Period): Boolean = this.code != that.code - } - object Period { + object Period: /** The single-phase period consisting of given run id and phase id */ def apply(rid: RunId, pid: PhaseId): Period = @@ -116,7 +114,6 @@ object Periods { /** The interval consisting of all periods of given run id */ def allInRun(rid: RunId): Period = apply(rid, 0, PhaseMask) - } inline val NowhereCode = 0 final val Nowhere: Period = new Period(NowhereCode) @@ -141,4 +138,3 @@ object Periods { inline val PhaseWidth = 7 inline val PhaseMask = (1 << PhaseWidth) - 1 inline val MaxPossiblePhaseId = PhaseMask -} diff --git a/compiler/src/dotty/tools/dotc/core/Phases.scala b/compiler/src/dotty/tools/dotc/core/Phases.scala index 3c4c45ab254a..dc38c473e1dc 100644 --- a/compiler/src/dotty/tools/dotc/core/Phases.scala +++ b/compiler/src/dotty/tools/dotc/core/Phases.scala @@ -22,37 +22,34 @@ import ast.{tpd, untpd} import scala.annotation.internal.sharable import scala.util.control.NonFatal -object Phases { +object Phases: inline def phaseOf(id: PhaseId)(using Context): Phase = ctx.base.phases(id) - @sharable object NoPhase extends Phase { + @sharable object NoPhase extends Phase: override def exists: Boolean = false def phaseName: String = "" def run(using Context): Unit = unsupported("run") def transform(ref: SingleDenotation)(using Context): SingleDenotation = unsupported("transform") - } - trait PhasesBase { + trait PhasesBase: this: ContextBase => // drop NoPhase at beginning def allPhases: Array[Phase] = (if (fusedPhases.nonEmpty) fusedPhases else phases).tail - object SomePhase extends Phase { + object SomePhase extends Phase: def phaseName: String = "" def run(using Context): Unit = unsupported("run") - } /** A sentinel transformer object */ - class TerminalPhase extends DenotTransformer { + class TerminalPhase extends DenotTransformer: def 
phaseName: String = "terminal" def run(using Context): Unit = unsupported("run") def transform(ref: SingleDenotation)(using Context): SingleDenotation = unsupported("transform") override def lastPhaseId(using Context): Int = id - } final def phasePlan: List[List[Phase]] = this.phasesPlan final def setPhasePlan(phasess: List[List[Phase]]): Unit = this.phasesPlan = phasess @@ -65,7 +62,7 @@ object Phases { phasesToSkip: List[String], stopBeforePhases: List[String], stopAfterPhases: List[String], - YCheckAfter: List[String])(using Context): List[Phase] = { + YCheckAfter: List[String])(using Context): List[Phase] = val fusedPhases = ListBuffer[Phase]() var prevPhases: Set[String] = Set.empty @@ -84,13 +81,13 @@ object Phases { var i = 0 - while (i < filteredPhases.length) { - if (filteredPhases(i).nonEmpty) { //could be empty due to filtering + while (i < filteredPhases.length) + if (filteredPhases(i).nonEmpty) //could be empty due to filtering val filteredPhaseBlock = filteredPhases(i) val phaseToAdd = - if (filteredPhaseBlock.length > 1) { + if (filteredPhaseBlock.length > 1) for (phase <- filteredPhaseBlock) - phase match { + phase match case p: MiniPhase => val unmetRequirements = p.runsAfterGroupsOf &~ prevPhases assert(unmetRequirements.isEmpty, @@ -98,34 +95,27 @@ object Phases { case _ => assert(false, s"Only tree transforms can be fused, ${phase.phaseName} can not be fused") - } val superPhase = new MegaPhase(filteredPhaseBlock.asInstanceOf[List[MiniPhase]].toArray) prevPhases ++= filteredPhaseBlock.map(_.phaseName) superPhase - } - else { // block of a single phase, no fusion + else // block of a single phase, no fusion val phase = filteredPhaseBlock.head prevPhases += phase.phaseName phase - } fusedPhases += phaseToAdd val shouldAddYCheck = filteredPhases(i).exists(_.isCheckable) && YCheckAfter.containsPhase(phaseToAdd) - if (shouldAddYCheck) { + if (shouldAddYCheck) val checker = new TreeChecker fusedPhases += checker - } - } i += 1 - } fusedPhases.toList - 
} /** Use the following phases in the order they are given. * The list should never contain NoPhase. * if fusion is enabled, phases in same subgroup will be fused to single phase. */ - final def usePhases(phasess: List[Phase], fuse: Boolean = true): Unit = { + final def usePhases(phasess: List[Phase], fuse: Boolean = true): Unit = val flatPhases = collection.mutable.ListBuffer[Phase]() @@ -141,23 +131,21 @@ object Phases { denotTransformers = new Array[DenotTransformer](phases.length) var phaseId = 0 - def nextPhaseId = { + def nextPhaseId = phaseId += 1 phaseId // starting from 1 as NoPhase is 0 - } - def checkRequirements(p: Phase) = { + def checkRequirements(p: Phase) = val unmetPrecedeRequirements = p.runsAfter -- phasesAfter assert(unmetPrecedeRequirements.isEmpty, s"phase ${p} has unmet requirement: ${unmetPrecedeRequirements.mkString(", ")} should precede this phase") phasesAfter += p.phaseName - } var i = 0 - while (i < phasess.length) { + while (i < phasess.length) val phase = phasess(i) - phase match { + phase match case p: MegaPhase => val miniPhases = p.miniPhases miniPhases.foreach{ phase => @@ -167,26 +155,22 @@ object Phases { case _ => phase.init(this, nextPhaseId) checkRequirements(phase) - } i += 1 - } phases.last.init(this, nextPhaseId) // init terminal phase i = phases.length var lastTransformerId = i - while (i > 0) { + while (i > 0) i -= 1 val phase = phases(i) - phase match { + phase match case transformer: DenotTransformer => lastTransformerId = i denotTransformers(i) = transformer case _ => - } nextDenotTransformerId(i) = lastTransformerId - } if (fuse) this.fusedPhases = (NoPhase :: phasess).toArray @@ -195,7 +179,6 @@ object Phases { config.println(s"Phases = ${phases.toList}") config.println(s"nextDenotTransformerId = ${nextDenotTransformerId.toList}") - } /** Unlink `phase` from Denot transformer chain. This means that * any denotation transformer defined by the phase will not be executed. 
@@ -255,7 +238,7 @@ object Phases { final def genBCodePhase: Phase = myGenBCodePhase final def checkCapturesPhase: Phase = myCheckCapturesPhase - private def setSpecificPhases() = { + private def setSpecificPhases() = def phaseOfClass(pclass: Class[?]) = phases.find(pclass.isInstance).getOrElse(NoPhase) myParserPhase = phaseOfClass(classOf[Parser]) @@ -282,13 +265,11 @@ object Phases { myGettersPhase = phaseOfClass(classOf[Getters]) myGenBCodePhase = phaseOfClass(classOf[GenBCode]) myCheckCapturesPhase = phaseOfClass(classOf[CheckCaptures]) - } final def isAfterTyper(phase: Phase): Boolean = phase.id > typerPhase.id final def isTyper(phase: Phase): Boolean = phase.id == typerPhase.id - } - abstract class Phase { + abstract class Phase: /** A name given to the `Phase` that can be used to debug the compiler. For * instance, it is possible to print trees after a given phase using: @@ -401,7 +382,7 @@ object Phases { final def sameBaseTypesStartId: Int = mySameBaseTypesStartId // id of first phase where all symbols are guaranteed to have the same base tpyes as in this phase - protected[Phases] def init(base: ContextBase, start: Int, end: Int): Unit = { + protected[Phases] def init(base: ContextBase, start: Int, end: Int): Unit = if (start >= FirstPhaseId) assert(myPeriod == Periods.InvalidPeriod, s"phase $this has already been used once; cannot be reused") assert(start <= Periods.MaxPossiblePhaseId, s"Too many phases, Period bits overflow") @@ -415,7 +396,6 @@ object Phases { mySameMembersStartId = if (changesMembers) id else prev.sameMembersStartId mySameParentsStartId = if (changesParents) id else prev.sameParentsStartId mySameBaseTypesStartId = if (changesBaseTypes) id else prev.sameBaseTypesStartId - } protected[Phases] def init(base: ContextBase, id: Int): Unit = init(base, id, id) @@ -444,7 +424,6 @@ object Phases { throw ex override def toString: String = phaseName - } def parserPhase(using Context): Phase = ctx.base.parserPhase def typerPhase(using Context): 
Phase = ctx.base.typerPhase @@ -476,4 +455,3 @@ object Phases { private def replace(oldPhaseClass: Class[? <: Phase], newPhases: Phase => List[Phase], current: List[List[Phase]]): List[List[Phase]] = current.map(_.flatMap(phase => if (oldPhaseClass.isInstance(phase)) newPhases(phase) else phase :: Nil)) -} diff --git a/compiler/src/dotty/tools/dotc/core/Scopes.scala b/compiler/src/dotty/tools/dotc/core/Scopes.scala index 99076b422358..6300187db523 100644 --- a/compiler/src/dotty/tools/dotc/core/Scopes.scala +++ b/compiler/src/dotty/tools/dotc/core/Scopes.scala @@ -20,7 +20,7 @@ import SymDenotations.NoDenotation import collection.mutable -object Scopes { +object Scopes: /** Maximal fill factor of hash table */ private inline val FillFactor = 2.0/3.0 @@ -43,7 +43,7 @@ object Scopes { */ type SymbolSynthesizer = Name => Context ?=> Symbol - class ScopeEntry private[Scopes] (val name: Name, _sym: Symbol, val owner: Scope) { + class ScopeEntry private[Scopes] (val name: Name, _sym: Symbol, val owner: Scope): var sym: Symbol = _sym @@ -56,7 +56,6 @@ object Scopes { var prev: ScopeEntry | Null = null override def toString: String = sym.toString - } /** A scope contains a set of symbols. It can be an extension * of some outer scope, from which it inherits all symbols. @@ -64,7 +63,7 @@ object Scopes { * or to delete them. These methods are provided by subclass * MutableScope. */ - abstract class Scope extends printing.Showable { + abstract class Scope extends printing.Showable: /** The last scope-entry from which all others are reachable via `prev` */ private[dotc] def lastEntry: ScopeEntry | Null @@ -93,26 +92,23 @@ object Scopes { def foreach[U](f: Symbol => U)(using Context): Unit = toList.foreach(f) /** Selects all Symbols of this Scope which satisfy a predicate. 
*/ - def filter(p: Symbol => Boolean)(using Context): List[Symbol] = { + def filter(p: Symbol => Boolean)(using Context): List[Symbol] = ensureComplete() var syms: List[Symbol] = Nil var e = lastEntry - while ((e != null) && e.owner == this) { + while ((e != null) && e.owner == this) val sym = e.sym if (p(sym)) syms = sym :: syms e = e.prev - } syms - } /** Tests whether a predicate holds for at least one Symbol of this Scope. */ def exists(p: Symbol => Boolean)(using Context): Boolean = filter(p).nonEmpty /** Finds the first Symbol of this Scope satisfying a predicate, if any. */ - def find(p: Symbol => Boolean)(using Context): Symbol = filter(p) match { + def find(p: Symbol => Boolean)(using Context): Symbol = filter(p) match case sym :: _ => sym case _ => NoSymbol - } /** Returns a new mutable scope with the same content as this one. */ def cloneScope(using Context): MutableScope @@ -124,18 +120,16 @@ object Scopes { def lookupNextEntry(entry: ScopeEntry)(using Context): ScopeEntry | Null /** Lookup a symbol */ - final def lookup(name: Name)(using Context): Symbol = { + final def lookup(name: Name)(using Context): Symbol = val e = lookupEntry(name) if (e == null) NoSymbol else e.sym - } /** Returns an iterator yielding every symbol with given name in this scope. */ - final def lookupAll(name: Name)(using Context): Iterator[Symbol] = new Iterator[Symbol] { + final def lookupAll(name: Name)(using Context): Iterator[Symbol] = new Iterator[Symbol]: var e = lookupEntry(name) def hasNext: Boolean = e != null def next(): Symbol = { val r = e.nn.sym; e = lookupNextEntry(e.uncheckedNN); r } - } /** Does this scope contain a reference to `sym` when looking up `name`? */ final def contains(name: Name, sym: Symbol)(using Context): Boolean = @@ -147,30 +141,26 @@ object Scopes { * Symbols occur in the result in reverse order relative to their occurrence * in `this.toList`. 
*/ - final def denotsNamed(name: Name)(using Context): PreDenotation = { + final def denotsNamed(name: Name)(using Context): PreDenotation = var syms: PreDenotation = NoDenotation var e = lookupEntry(name) - while (e != null) { + while (e != null) syms = syms union e.sym.denot e = lookupNextEntry(e) - } syms - } /** The scope that keeps only those symbols from this scope that match the * given predicates. If all symbols match, returns the scope itself, otherwise * a copy with the matching symbols. */ - final def filteredScope(p: Symbol => Boolean)(using Context): Scope = { + final def filteredScope(p: Symbol => Boolean)(using Context): Scope = var result: MutableScope | Null = null for (sym <- iterator) - if (!p(sym)) { + if (!p(sym)) if (result == null) result = cloneScope result.nn.unlink(sym) - } // TODO: improve flow typing to handle this case if (result == null) this else result.uncheckedNN - } def implicitDecls(using Context): List[TermRef] = Nil @@ -185,7 +175,6 @@ object Scopes { * makes sure that all names with `$`'s have been added. */ protected def ensureComplete()(using Context): Unit = () - } /** A subclass of Scope that defines methods for entering and * unlinking entries. @@ -194,7 +183,7 @@ object Scopes { * SynchronizedScope as mixin. */ class MutableScope protected[Scopes](initElems: ScopeEntry | Null, initSize: Int, val nestingLevel: Int) - extends Scope { + extends Scope: /** Scope shares elements with `base` */ protected[Scopes] def this(base: Scope)(using Context) = @@ -229,24 +218,21 @@ object Scopes { /** Clone scope, taking care not to force the denotations of any symbols in the scope. 
*/ - def cloneScope(using Context): MutableScope = { + def cloneScope(using Context): MutableScope = val entries = new mutable.ArrayBuffer[ScopeEntry] var e = lastEntry - while ((e != null) && e.owner == this) { + while ((e != null) && e.owner == this) entries += e e = e.prev - } val scope = newScopeLikeThis() - for (i <- entries.length - 1 to 0 by -1) { + for (i <- entries.length - 1 to 0 by -1) val e = entries(i) scope.newScopeEntry(e.name, e.sym) - } scope.synthesize = synthesize scope - } /** create and enter a scope entry with given name and symbol */ - protected def newScopeEntry(name: Name, sym: Symbol)(using Context): ScopeEntry = { + protected def newScopeEntry(name: Name, sym: Symbol)(using Context): ScopeEntry = ensureCapacity(if (hashTable != null) hashTable.uncheckedNN.length else MinHashedScopeSize) val e = new ScopeEntry(name, sym, this) e.prev = lastEntry @@ -255,170 +241,142 @@ object Scopes { size += 1 elemsCache = null e - } - private def enterInHash(e: ScopeEntry)(using Context): Unit = { + private def enterInHash(e: ScopeEntry)(using Context): Unit = val idx = e.name.hashCode & (hashTable.nn.length - 1) e.tail = hashTable.nn(idx) assert(e.tail != e) hashTable.nn(idx) = e - } /** enter a symbol in this scope. */ - final def enter[T <: Symbol](sym: T)(using Context): T = { + final def enter[T <: Symbol](sym: T)(using Context): T = if (sym.isType && ctx.phaseId <= typerPhase.id) assert(lookup(sym.name) == NoSymbol, s"duplicate ${sym.debugString}; previous was ${lookup(sym.name).debugString}") // !!! 
DEBUG enter(sym.name, sym) - } - final def enter[T <: Symbol](name: Name, sym: T)(using Context): T = { + final def enter[T <: Symbol](name: Name, sym: T)(using Context): T = newScopeEntry(name, sym) sym - } /** enter a symbol, asserting that no symbol with same name exists in scope */ - final def enterUnique(sym: Symbol)(using Context): Unit = { + final def enterUnique(sym: Symbol)(using Context): Unit = assert(lookup(sym.name) == NoSymbol, (sym.showLocated, lookup(sym.name).showLocated)) enter(sym) - } private def ensureCapacity(tableSize: Int)(using Context): Unit = if (size >= tableSize * FillFactor) createHash(tableSize * 2) private def createHash(tableSize: Int)(using Context): Unit = if (size > tableSize * FillFactor) createHash(tableSize * 2) - else { + else hashTable = new Array[ScopeEntry | Null](tableSize) enterAllInHash(lastEntry) // checkConsistent() // DEBUG - } private def enterAllInHash(e: ScopeEntry | Null, n: Int = 0)(using Context): Unit = if (e != null) - if (n < MaxRecursions) { + if (n < MaxRecursions) enterAllInHash(e.prev, n + 1) enterInHash(e) - } - else { + else var entries: List[ScopeEntry] = List() var ee: ScopeEntry | Null = e - while (ee != null) { + while (ee != null) entries = ee :: entries ee = ee.prev - } entries foreach enterInHash - } /** Remove entry from this scope (which is required to be present) */ - final def unlink(e: ScopeEntry)(using Context): Unit = { + final def unlink(e: ScopeEntry)(using Context): Unit = if (lastEntry == e) lastEntry = e.prev - else { + else var e1 = lastEntry.nn while (e1.prev != e) e1 = e1.prev.nn e1.prev = e.prev - } - if (hashTable != null) { + if (hashTable != null) val index = e.name.hashCode & (hashTable.nn.length - 1) var e1 = hashTable.nn(index) if (e1 == e) hashTable.nn(index) = e.tail - else { + else while (e1.nn.tail != e) e1 = e1.nn.tail e1.nn.tail = e.tail - } - } elemsCache = null size -= 1 - } /** remove symbol from this scope if it is present */ final def unlink(sym: Symbol)(using 
Context): Unit = unlink(sym, sym.name) /** remove symbol from this scope if it is present under the given name */ - final def unlink(sym: Symbol, name: Name)(using Context): Unit = { + final def unlink(sym: Symbol, name: Name)(using Context): Unit = var e = lookupEntry(name) - while (e != null) { + while (e != null) if (e.sym == sym) unlink(e) e = lookupNextEntry(e) - } - } /** Replace symbol `prev` (if it exists in current scope) by symbol `replacement`. * @pre `prev` and `replacement` have the same name. */ - final def replace(prev: Symbol, replacement: Symbol)(using Context): Unit = { + final def replace(prev: Symbol, replacement: Symbol)(using Context): Unit = require(prev.name == replacement.name) var e = lookupEntry(prev.name) - while (e != null) { + while (e != null) if (e.sym == prev) e.sym = replacement e = lookupNextEntry(e) - } elemsCache = null - } /** Lookup a symbol entry matching given name. */ - override def lookupEntry(name: Name)(using Context): ScopeEntry | Null = { + override def lookupEntry(name: Name)(using Context): ScopeEntry | Null = var e: ScopeEntry | Null = null - if (hashTable != null) { + if (hashTable != null) e = hashTable.nn(name.hashCode & (hashTable.nn.length - 1)) while ((e != null) && e.name != name) e = e.tail - } - else { + else e = lastEntry while ((e != null) && e.name != name) e = e.prev - } - if ((e == null) && (synthesize != null)) { + if ((e == null) && (synthesize != null)) val sym = synthesize.uncheckedNN(name) if (sym.exists) newScopeEntry(sym.name, sym) else e - } else e - } /** lookup next entry with same name as this one */ - override final def lookupNextEntry(entry: ScopeEntry)(using Context): ScopeEntry | Null = { + override final def lookupNextEntry(entry: ScopeEntry)(using Context): ScopeEntry | Null = var e: ScopeEntry | Null = entry if (hashTable != null) while ({ e = e.nn.tail ; (e != null) && e.uncheckedNN.name != entry.name }) () else while ({ e = e.nn.prev ; (e != null) && e.uncheckedNN.name != entry.name 
}) () e - } /** Returns all symbols as a list in the order they were entered in this scope. * Does _not_ include the elements of inherited scopes. */ - override final def toList(using Context): List[Symbol] = { - if (elemsCache == null) { + override final def toList(using Context): List[Symbol] = + if (elemsCache == null) ensureComplete() elemsCache = Nil var e = lastEntry - while ((e != null) && e.owner == this) { + while ((e != null) && e.owner == this) elemsCache = e.sym :: elemsCache.nn e = e.prev - } - } elemsCache.nn - } - override def implicitDecls(using Context): List[TermRef] = { + override def implicitDecls(using Context): List[TermRef] = ensureComplete() var irefs = new mutable.ListBuffer[TermRef] var e = lastEntry - while (e != null) { - if (e.sym.isOneOf(GivenOrImplicitVal)) { + while (e != null) + if (e.sym.isOneOf(GivenOrImplicitVal)) val d = e.sym.denot irefs += TermRef(NoPrefix, d.symbol.asTerm).withDenot(d) - } e = e.prev - } irefs.toList - } /** Vanilla scope - symbols are stored in declaration order. */ @@ -427,17 +385,14 @@ object Scopes { override def openForMutations: MutableScope = this /** Check that all symbols in this scope are in their correct hashtable buckets. 
*/ - override def checkConsistent()(using Context): Unit = { + override def checkConsistent()(using Context): Unit = ensureComplete() var e = lastEntry - while (e != null) { + while (e != null) var e1 = lookupEntry(e.name) while (e1 != e && e1 != null) e1 = lookupNextEntry(e1) assert(e1 == e, s"PANIC: Entry ${e.nn.name} is badly linked") e = e.prev - } - } - } /** Create a new scope */ def newScope(using Context): MutableScope = @@ -449,11 +404,10 @@ object Scopes { def newNestedScope(outer: Scope)(using Context): MutableScope = new MutableScope(outer) /** Create a new scope with given initial elements */ - def newScopeWith(elems: Symbol*)(using Context): MutableScope = { + def newScopeWith(elems: Symbol*)(using Context): MutableScope = val scope = newScope elems foreach scope.enter scope - } /** Transform scope of members of `owner` using operation `op` * This is overridden by the reflective compiler to avoid creating new scopes for packages @@ -462,7 +416,7 @@ object Scopes { /** The empty scope (immutable). 
*/ - object EmptyScope extends Scope { + object EmptyScope extends Scope: override private[dotc] def lastEntry: ScopeEntry | Null = null override def size: Int = 0 override def nestingLevel: Int = 0 @@ -470,5 +424,3 @@ object Scopes { override def cloneScope(using Context): MutableScope = newScope(nestingLevel) override def lookupEntry(name: Name)(using Context): ScopeEntry | Null = null override def lookupNextEntry(entry: ScopeEntry)(using Context): ScopeEntry | Null = null - } -} diff --git a/compiler/src/dotty/tools/dotc/core/Signature.scala b/compiler/src/dotty/tools/dotc/core/Signature.scala index bd744ec01846..a372678aa586 100644 --- a/compiler/src/dotty/tools/dotc/core/Signature.scala +++ b/compiler/src/dotty/tools/dotc/core/Signature.scala @@ -46,7 +46,7 @@ import Signature._ * - tpnme.WILDCARD Arises from a Wildcard or error type * - tpnme.Uninstantiated Arises from an uninstantiated type variable */ -case class Signature(paramsSig: List[ParamSig], resSig: TypeName) { +case class Signature(paramsSig: List[ParamSig], resSig: TypeName): /** Two names are consistent if they are the same or one of them is tpnme.Uninstantiated */ private def consistent(name1: ParamSig, name2: ParamSig) = @@ -56,26 +56,23 @@ case class Signature(paramsSig: List[ParamSig], resSig: TypeName) { * This is the case if all parameter signatures are _consistent_, i.e. they are either * equal or on of them is tpnme.Uninstantiated. */ - final def consistentParams(that: Signature)(using Context): Boolean = { + final def consistentParams(that: Signature)(using Context): Boolean = @tailrec def loop(names1: List[ParamSig], names2: List[ParamSig]): Boolean = if (names1.isEmpty) names2.isEmpty else !names2.isEmpty && consistent(names1.head, names2.head) && loop(names1.tail, names2.tail) loop(this.paramsSig, that.paramsSig) - } /** `that` signature, but keeping all corresponding parts of `this` signature. 
*/ - final def updateWith(that: Signature): Signature = { + final def updateWith(that: Signature): Signature = def update[T <: ParamSig](name1: T, name2: T): T = if (consistent(name1, name2)) name1 else name2 if (this == that) this else if (!this.paramsSig.hasSameLengthAs(that.paramsSig)) that - else { + else val mapped = Signature( this.paramsSig.zipWithConserve(that.paramsSig)(update), update(this.resSig, that.resSig)) if (mapped == this) this else mapped - } - } /** The degree to which this signature matches `that`. * If parameter signatures are consistent and result types names match (i.e. they are the same @@ -124,15 +121,14 @@ case class Signature(paramsSig: List[ParamSig], resSig: TypeName) { */ def isUnderDefined(using Context): Boolean = paramsSig.contains(tpnme.Uninstantiated) || resSig == tpnme.Uninstantiated -} -object Signature { +object Signature: /** A parameter signature, see the documentation of `Signature` for more information. */ type ParamSig = TypeName | Int // Erasure means that our Ints will be boxed, but Integer#valueOf caches // small values, so the performance hit should be minimal. - enum MatchDegree { + enum MatchDegree: /** The signatures are unrelated. */ case NoMatch /** The parameter signatures are equivalent. */ @@ -144,7 +140,6 @@ object Signature { case MethodNotAMethodMatch /** The parameter and result type signatures are equivalent. */ case FullMatch - } export MatchDegree._ /** The signature of everything that's not a method, i.e. that has @@ -162,36 +157,28 @@ object Signature { * otherwise the signature will change once the contained type variables have * been instantiated. 
*/ - def apply(resultType: Type, sourceLanguage: SourceLanguage)(using Context): Signature = { + def apply(resultType: Type, sourceLanguage: SourceLanguage)(using Context): Signature = assert(!resultType.isInstanceOf[ExprType]) apply(Nil, sigName(resultType, sourceLanguage)) - } - val lexicographicOrdering: Ordering[Signature] = new Ordering[Signature] { - val paramSigOrdering: Ordering[Signature.ParamSig] = new Ordering[Signature.ParamSig] { - def compare(x: ParamSig, y: ParamSig): Int = x match { // `(x, y) match` leads to extra allocations + val lexicographicOrdering: Ordering[Signature] = new Ordering[Signature]: + val paramSigOrdering: Ordering[Signature.ParamSig] = new Ordering[Signature.ParamSig]: + def compare(x: ParamSig, y: ParamSig): Int = x match // `(x, y) match` leads to extra allocations case x: TypeName => - y match { + y match case y: TypeName => // `Ordering[TypeName]` doesn't work due to `Ordering` still being invariant summon[Ordering[Name]].compare(x, y) case y: Int => 1 - } case x: Int => - y match { + y match case y: Name => -1 case y: Int => x - y - } - } - } - def compare(x: Signature, y: Signature): Int = { + def compare(x: Signature, y: Signature): Int = import scala.math.Ordering.Implicits.seqOrdering val paramsOrdering = seqOrdering(paramSigOrdering).compare(x.paramsSig, y.paramsSig) if (paramsOrdering != 0) paramsOrdering else summon[Ordering[Name]].compare(x.resSig, y.resSig) - } - } -} diff --git a/compiler/src/dotty/tools/dotc/core/StdNames.scala b/compiler/src/dotty/tools/dotc/core/StdNames.scala index cd9526b27a21..4ee930fc919d 100644 --- a/compiler/src/dotty/tools/dotc/core/StdNames.scala +++ b/compiler/src/dotty/tools/dotc/core/StdNames.scala @@ -8,11 +8,11 @@ import Names._ import Symbols._ import Contexts._ -object StdNames { +object StdNames: /** Base strings from which synthetic names are derived. 
*/ - object str { + object str: inline val SETTER_SUFFIX = "_=" inline val EXPAND_SEPARATOR = "$$" inline val TRAIT_SETTER_SEPARATOR = "$_setter_$" @@ -44,9 +44,8 @@ object StdNames { @sharable private val disallowed = java.util.regex.Pattern.compile("""[<>]""").nn def sanitize(str: String): String = disallowed.matcher(str).nn.replaceAll("""\$""").nn - } - abstract class DefinedNames[N <: Name] { + abstract class DefinedNames[N <: Name]: protected implicit def fromString(s: String): N protected def fromName(name: Name): N = fromString(name.toString) @@ -54,9 +53,8 @@ object StdNames { protected def kw(name: N): N = { kws += name; name } final val keywords: collection.Set[N] = kws - } - abstract class ScalaNames[N <: Name] extends DefinedNames[N] { + abstract class ScalaNames[N <: Name] extends DefinedNames[N]: protected def encode(s: String): N = fromName(fromString(s).encode) // Keywords, need to come first ----------------------- @@ -660,7 +658,7 @@ object StdNames { val falseModuleClassNames: Set[N] = Set(nothingClass, nullClass, nothingRuntimeClass, nullRuntimeClass) // unencoded operators - object raw { + object raw: final val AMP : N = "&" final val BANG : N = "!" 
final val BAR : N = "|" @@ -682,9 +680,8 @@ object StdNames { final val PLUS_USCORE : N = "+_" final val isUnary: Set[Name] = Set(MINUS, PLUS, TILDE, BANG) - } - object specializedTypeNames { + object specializedTypeNames: final val Boolean: N = "Z" final val Byte: N = "B" final val Char: N = "C" @@ -699,7 +696,6 @@ object StdNames { final val prefix: N = "$m" final val separator: N = "c" final val suffix: N = "$sp" - } // value-conversion methods val toByte: N = "toByte" @@ -786,12 +782,11 @@ object StdNames { val isBoxedNumberOrBoolean: N = "isBoxedNumberOrBoolean" val isBoxedNumber: N = "isBoxedNumber" - } - class ScalaTermNames extends ScalaNames[TermName] { + class ScalaTermNames extends ScalaNames[TermName]: protected implicit def fromString(s: String): TermName = termName(s) - def syntheticParamName(i: Int): TermName = (i: @switch) match { + def syntheticParamName(i: Int): TermName = (i: @switch) match case 0 => x_0 case 1 => x_1 case 2 => x_2 @@ -803,9 +798,8 @@ object StdNames { case 8 => x_8 case 9 => x_9 case _ => termName("x$" + i) - } - def productAccessorName(j: Int): TermName = (j: @switch) match { + def productAccessorName(j: Int): TermName = (j: @switch) match case 1 => nme._1 case 2 => nme._2 case 3 => nme._3 @@ -829,7 +823,6 @@ object StdNames { case 21 => nme._21 case 22 => nme._22 case _ => termName("_" + j) - } def localDummyName(clazz: Symbol)(using Context): TermName = termName(str.LOCALDUMMY_PREFIX + clazz.name + ">") @@ -838,17 +831,15 @@ object StdNames { def selectorName(n: Int): TermName = productAccessorName(n + 1) - object primitive { + object primitive: val arrayApply: TermName = "[]apply" val arrayUpdate: TermName = "[]update" val arrayLength: TermName = "[]length" val names: Set[Name] = Set(arrayApply, arrayUpdate, arrayLength) - } def isPrimitiveName(name: Name): Boolean = primitive.names.contains(name) - } - class ScalaTypeNames extends ScalaNames[TypeName] { + class ScalaTypeNames extends ScalaNames[TypeName]: protected implicit 
def fromString(s: String): TypeName = typeName(s) def syntheticTypeParamName(i: Int): TypeName = "X" + i @@ -859,9 +850,8 @@ object StdNames { val JFunctionPrefix: Seq[TypeName] = (0 to 2).map(i => s"scala.runtime.java8.JFunction${i}") val JProcedure: Seq[TypeName] = (0 to 22).map(i => s"scala.runtime.function.JProcedure${i}") - } - abstract class JavaNames[N <: Name] extends DefinedNames[N] { + abstract class JavaNames[N <: Name] extends DefinedNames[N]: final val ABSTRACTkw: N = kw("abstract") final val ASSERTkw: N = kw("assert") final val BOOLEANkw: N = kw("boolean") @@ -949,19 +939,15 @@ object StdNames { final val BeanProperty: N = "scala.beans.BeanProperty" final val BooleanBeanProperty: N = "scala.beans.BooleanBeanProperty" final val JavaSerializable: N = "java.io.Serializable" - } - class JavaTermNames extends JavaNames[TermName] { + class JavaTermNames extends JavaNames[TermName]: protected def fromString(s: String): TermName = termName(s) - } - class JavaTypeNames extends JavaNames[TypeName] { + class JavaTypeNames extends JavaNames[TypeName]: protected def fromString(s: String): TypeName = typeName(s) - } val nme: ScalaTermNames = new ScalaTermNames val tpnme: ScalaTypeNames = new ScalaTypeNames val jnme: JavaTermNames = new JavaTermNames val jtpnme: JavaTypeNames = new JavaTypeNames -} diff --git a/compiler/src/dotty/tools/dotc/core/Substituters.scala b/compiler/src/dotty/tools/dotc/core/Substituters.scala index 3e32340b21bd..ad0a419508fa 100644 --- a/compiler/src/dotty/tools/dotc/core/Substituters.scala +++ b/compiler/src/dotty/tools/dotc/core/Substituters.scala @@ -10,7 +10,7 @@ import cc.CaptureSet.IdempotentCaptRefMap object Substituters: final def subst(tp: Type, from: BindingType, to: BindingType, theMap: SubstBindingMap | Null)(using Context): Type = - tp match { + tp match case tp: BoundType => if (tp.binder eq from) tp.copyBoundType(to.asInstanceOf[tp.BT]) else tp case tp: NamedType => @@ -23,10 +23,9 @@ object Substituters: case _ => (if 
(theMap != null) theMap else new SubstBindingMap(from, to)) .mapOver(tp) - } final def subst1(tp: Type, from: Symbol, to: Type, theMap: Subst1Map | Null)(using Context): Type = - tp match { + tp match case tp: NamedType => val sym = tp.symbol if (sym eq from) return to @@ -37,10 +36,9 @@ object Substituters: case _ => (if (theMap != null) theMap else new Subst1Map(from, to)) .mapOver(tp) - } final def subst2(tp: Type, from1: Symbol, to1: Type, from2: Symbol, to2: Type, theMap: Subst2Map | Null)(using Context): Type = - tp match { + tp match case tp: NamedType => val sym = tp.symbol if (sym eq from1) return to1 @@ -52,19 +50,17 @@ object Substituters: case _ => (if (theMap != null) theMap else new Subst2Map(from1, to1, from2, to2)) .mapOver(tp) - } final def subst(tp: Type, from: List[Symbol], to: List[Type], theMap: SubstMap | Null)(using Context): Type = - tp match { + tp match case tp: NamedType => val sym = tp.symbol var fs = from var ts = to - while (fs.nonEmpty && ts.nonEmpty) { + while (fs.nonEmpty && ts.nonEmpty) if (fs.head eq sym) return ts.head fs = fs.tail ts = ts.tail - } if (tp.prefix `eq` NoPrefix) tp else tp.derivedSelect(subst(tp.prefix, from, to, theMap)) case _: ThisType | _: BoundType => @@ -72,41 +68,37 @@ object Substituters: case _ => (if (theMap != null) theMap else new SubstMap(from, to)) .mapOver(tp) - } final def substSym(tp: Type, from: List[Symbol], to: List[Symbol], theMap: SubstSymMap | Null)(using Context): Type = - tp match { + tp match case tp: NamedType => val sym = tp.symbol var fs = from var ts = to - while (fs.nonEmpty) { + while (fs.nonEmpty) if (fs.head eq sym) return substSym(tp.prefix, from, to, theMap) select ts.head fs = fs.tail ts = ts.tail - } if (tp.prefix `eq` NoPrefix) tp else tp.derivedSelect(substSym(tp.prefix, from, to, theMap)) case tp: ThisType => val sym = tp.cls var fs = from var ts = to - while (fs.nonEmpty) { + while (fs.nonEmpty) if (fs.head eq sym) return ts.head.asClass.thisType fs = fs.tail ts = ts.tail - 
} tp case _: BoundType => tp case _ => (if (theMap != null) theMap else new SubstSymMap(from, to)) .mapOver(tp) - } final def substThis(tp: Type, from: ClassSymbol, to: Type, theMap: SubstThisMap | Null)(using Context): Type = - tp match { + tp match case tp: ThisType => if (tp.cls eq from) to else tp case tp: NamedType => @@ -117,10 +109,9 @@ object Substituters: case _ => (if (theMap != null) theMap else new SubstThisMap(from, to)) .mapOver(tp) - } final def substRecThis(tp: Type, from: Type, to: Type, theMap: SubstRecThisMap | Null)(using Context): Type = - tp match { + tp match case tp @ RecThis(binder) => if (binder eq from) to else tp case tp: NamedType => @@ -131,10 +122,9 @@ object Substituters: case _ => (if (theMap != null) theMap else new SubstRecThisMap(from, to)) .mapOver(tp) - } final def substParam(tp: Type, from: ParamRef, to: Type, theMap: SubstParamMap | Null)(using Context): Type = - tp match { + tp match case tp: BoundType => if (tp == from) to else tp case tp: NamedType => @@ -145,10 +135,9 @@ object Substituters: case _ => (if (theMap != null) theMap else new SubstParamMap(from, to)) .mapOver(tp) - } final def substParams(tp: Type, from: BindingType, to: List[Type], theMap: SubstParamsMap | Null)(using Context): Type = - tp match { + tp match case tp: ParamRef => if (tp.binder == from) to(tp.paramNum) else tp case tp: NamedType => @@ -161,67 +150,53 @@ object Substituters: case _ => (if (theMap != null) theMap else new SubstParamsMap(from, to)) .mapOver(tp) - } - final class SubstBindingMap(from: BindingType, to: BindingType)(using Context) extends DeepTypeMap, BiTypeMap { + final class SubstBindingMap(from: BindingType, to: BindingType)(using Context) extends DeepTypeMap, BiTypeMap: def apply(tp: Type): Type = subst(tp, from, to, this)(using mapCtx) def inverse(tp: Type): Type = tp.subst(to, from) - } - final class Subst1Map(from: Symbol, to: Type)(using Context) extends DeepTypeMap { + final class Subst1Map(from: Symbol, to: Type)(using 
Context) extends DeepTypeMap: def apply(tp: Type): Type = subst1(tp, from, to, this)(using mapCtx) - } - final class Subst2Map(from1: Symbol, to1: Type, from2: Symbol, to2: Type)(using Context) extends DeepTypeMap { + final class Subst2Map(from1: Symbol, to1: Type, from2: Symbol, to2: Type)(using Context) extends DeepTypeMap: def apply(tp: Type): Type = subst2(tp, from1, to1, from2, to2, this)(using mapCtx) - } - final class SubstMap(from: List[Symbol], to: List[Type])(using Context) extends DeepTypeMap { + final class SubstMap(from: List[Symbol], to: List[Type])(using Context) extends DeepTypeMap: def apply(tp: Type): Type = subst(tp, from, to, this)(using mapCtx) - } - final class SubstSymMap(from: List[Symbol], to: List[Symbol])(using Context) extends DeepTypeMap, BiTypeMap { + final class SubstSymMap(from: List[Symbol], to: List[Symbol])(using Context) extends DeepTypeMap, BiTypeMap: def apply(tp: Type): Type = substSym(tp, from, to, this)(using mapCtx) def inverse(tp: Type) = tp.substSym(to, from) // implicitly requires that `to` contains no duplicates. 
- } - final class SubstThisMap(from: ClassSymbol, to: Type)(using Context) extends DeepTypeMap { + final class SubstThisMap(from: ClassSymbol, to: Type)(using Context) extends DeepTypeMap: def apply(tp: Type): Type = substThis(tp, from, to, this)(using mapCtx) - } - final class SubstRecThisMap(from: Type, to: Type)(using Context) extends DeepTypeMap { + final class SubstRecThisMap(from: Type, to: Type)(using Context) extends DeepTypeMap: def apply(tp: Type): Type = substRecThis(tp, from, to, this)(using mapCtx) - } - final class SubstParamMap(from: ParamRef, to: Type)(using Context) extends DeepTypeMap, IdempotentCaptRefMap { + final class SubstParamMap(from: ParamRef, to: Type)(using Context) extends DeepTypeMap, IdempotentCaptRefMap: def apply(tp: Type): Type = substParam(tp, from, to, this)(using mapCtx) - } - final class SubstParamsMap(from: BindingType, to: List[Type])(using Context) extends DeepTypeMap, IdempotentCaptRefMap { + final class SubstParamsMap(from: BindingType, to: List[Type])(using Context) extends DeepTypeMap, IdempotentCaptRefMap: def apply(tp: Type): Type = substParams(tp, from, to, this)(using mapCtx) - } /** An approximating substitution that can handle wildcards in the `to` list */ - final class SubstApproxMap(from: List[Symbol], to: List[Type])(using Context) extends ApproximatingTypeMap { - def apply(tp: Type): Type = tp match { + final class SubstApproxMap(from: List[Symbol], to: List[Type])(using Context) extends ApproximatingTypeMap: + def apply(tp: Type): Type = tp match case tp: NamedType => val sym = tp.symbol var fs = from var ts = to - while (fs.nonEmpty && ts.nonEmpty) { + while (fs.nonEmpty && ts.nonEmpty) if (fs.head eq sym) - return ts.head match { + return ts.head match case TypeBounds(lo, hi) => range(lo, hi) case tp1 => tp1 - } fs = fs.tail ts = ts.tail - } if (tp.prefix `eq` NoPrefix) tp else derivedSelect(tp, apply(tp.prefix)) case _: ThisType | _: BoundType => tp case _ => mapOver(tp) - } - } end Substituters diff --git 
a/compiler/src/dotty/tools/dotc/core/SymDenotations.scala b/compiler/src/dotty/tools/dotc/core/SymDenotations.scala index 304840396641..c81eae4724c9 100644 --- a/compiler/src/dotty/tools/dotc/core/SymDenotations.scala +++ b/compiler/src/dotty/tools/dotc/core/SymDenotations.scala @@ -28,7 +28,7 @@ import cc.{CapturingType, derivedCapturingType, Setup, EventuallyCapturingType, import scala.annotation.internal.sharable -object SymDenotations { +object SymDenotations: /** A sym-denotation represents the contents of a definition * during a period. @@ -39,7 +39,7 @@ object SymDenotations { final val name: Name, initFlags: FlagSet, initInfo: Type, - initPrivateWithin: Symbol = NoSymbol) extends SingleDenotation(symbol, initInfo, name.isTypeName) { + initPrivateWithin: Symbol = NoSymbol) extends SingleDenotation(symbol, initInfo, name.isTypeName): //assert(symbol.id != 4940, name) @@ -141,14 +141,13 @@ object SymDenotations { final def infoOrCompleter: Type = myInfo /** Optionally, the info if it is completed */ - final def unforcedInfo: Option[Type] = myInfo match { + final def unforcedInfo: Option[Type] = myInfo match case myInfo: LazyType => None case _ => Some(myInfo) - } final def completeFrom(completer: LazyType)(using Context): Unit = if completer.needsCompletion(this) then - if (Config.showCompletions) { + if (Config.showCompletions) println(i"${" " * indent}completing ${if (isType) "type" else "val"} $name") indent += 1 @@ -157,24 +156,20 @@ object SymDenotations { // completions.println(s"completing ${this.debugString}") try atPhase(validFor.firstPhaseId)(completer.complete(this)) - catch { + catch case ex: CyclicReference => println(s"error while completing ${this.debugString}") throw ex - } - finally { + finally indent -= 1 println(i"${" " * indent}completed $name in $owner") - } - } - else { + else if (myFlags.is(Touched)) throw CyclicReference(this)(using ctx.withOwner(symbol)) myFlags |= Touched atPhase(validFor.firstPhaseId)(completer.complete(this)) - } - 
protected[dotc] def info_=(tp: Type): Unit = { + protected[dotc] def info_=(tp: Type): Unit = /* // DEBUG def illegal: String = s"illegal type for $this: $tp" if (this is Module) // make sure module invariants that allow moduleClass and sourceModule to work are kept. @@ -187,7 +182,6 @@ object SymDenotations { */ if (Config.checkNoSkolemsInInfo) assertNoSkolems(tp) myInfo = tp - } /** The name, except * - if this is a module class, strip the module class suffix @@ -201,7 +195,7 @@ object SymDenotations { /** The privateWithin boundary, NoSymbol if no boundary is given. */ @tailrec - final def privateWithin(using Context): Symbol = myInfo match { + final def privateWithin(using Context): Symbol = myInfo match case myInfo: ModuleCompleter => // Instead of completing the ModuleCompleter, we can get `privateWithin` // directly from the module class, which might require less completions. @@ -213,23 +207,20 @@ object SymDenotations { case _ => // Otherwise, no completion is necessary, see the preconditions of `markAbsent()`. myPrivateWithin - } /** Set privateWithin, prefer setting it at symbol-creation time instead if * possible. * @pre `isCompleting` is false, or this is a ModuleCompleter or SymbolLoader */ - protected[dotc] final def setPrivateWithin(pw: Symbol)(using Context): Unit = { + protected[dotc] final def setPrivateWithin(pw: Symbol)(using Context): Unit = if (isCompleting) assert(myInfo.isInstanceOf[ModuleCompleter | SymbolLoader], s"Illegal call to `setPrivateWithin($pw)` while completing $this using completer $myInfo") myPrivateWithin = pw - } /** The annotations of this denotation */ - final def annotations(using Context): List[Annotation] = { + final def annotations(using Context): List[Annotation] = ensureCompleted(); myAnnotations - } /** The annotations without ensuring that the symbol is completed. * Used for diagnostics where we don't want to force symbols. 
@@ -266,19 +257,17 @@ object SymDenotations { /** Optionally, the annotation matching the given class symbol */ final def getAnnotation(cls: Symbol)(using Context): Option[Annotation] = - dropOtherAnnotations(annotations, cls) match { + dropOtherAnnotations(annotations, cls) match case annot :: _ => Some(annot) case nil => None - } /** The same as getAnnotation, but without ensuring * that the symbol carrying the annotation is completed */ final def unforcedAnnotation(cls: Symbol)(using Context): Option[Annotation] = - dropOtherAnnotations(myAnnotations, cls) match { + dropOtherAnnotations(myAnnotations, cls) match case annot :: _ => Some(annot) case nil => None - } /** Add given annotation to the annotations of this denotation */ final def addAnnotation(annot: Annotation): Unit = @@ -293,20 +282,18 @@ object SymDenotations { annotations = myAnnotations.filterNot(_ matches cls) /** Remove any annotations with same class as `annot`, and add `annot` */ - final def updateAnnotation(annot: Annotation)(using Context): Unit = { + final def updateAnnotation(annot: Annotation)(using Context): Unit = removeAnnotation(annot.symbol) addAnnotation(annot) - } /** Add all given annotations to this symbol */ final def addAnnotations(annots: IterableOnce[Annotation])(using Context): Unit = annots.iterator.foreach(addAnnotation) @tailrec - private def dropOtherAnnotations(anns: List[Annotation], cls: Symbol)(using Context): List[Annotation] = anns match { + private def dropOtherAnnotations(anns: List[Annotation], cls: Symbol)(using Context): List[Annotation] = anns match case ann :: rest => if (ann matches cls) anns else dropOtherAnnotations(rest, cls) case Nil => Nil - } /** If this is a method, the parameter symbols, by section. * Both type and value parameters are included. Empty sections are skipped. 
@@ -380,11 +367,10 @@ object SymDenotations { * * @see ensureCompleted */ - final def completeOnce()(using Context): Unit = myInfo match { + final def completeOnce()(using Context): Unit = myInfo match case myInfo: LazyType => completeFrom(myInfo) case _ => - } /** Make sure this denotation is fully completed. * @@ -402,23 +388,21 @@ object SymDenotations { * 3. When playing it safe in order not to raise CylicReferences, e.g. for printing things * or taking more efficient shortcuts (e.g. the stillValid test). */ - final def unforcedDecls(using Context): Scope = myInfo match { + final def unforcedDecls(using Context): Scope = myInfo match case cinfo: LazyType => val knownDecls = cinfo.decls if (knownDecls ne EmptyScope) knownDecls else { completeOnce(); unforcedDecls } case _ => info.decls - } /** If this is a package class, the symbols entered in it * before it is completed. (this is needed to eagerly enter synthetic * aliases such as AnyRef into a package class without forcing it. * Right now, the only usage is for the AnyRef alias in Definitions. 
*/ - final private[core] def currentPackageDecls(using Context): MutableScope = myInfo match { + final private[core] def currentPackageDecls(using Context): MutableScope = myInfo match case pinfo: SymbolLoaders.PackageLoader => pinfo.currentDecls case _ => unforcedDecls.openForMutations - } /** If this is an opaque alias, replace the right hand side `info` * by appropriate bounds and store `info` in the refinement of the @@ -436,13 +420,13 @@ object SymDenotations { def setAlias(tp: Type) = def recur(self: Type): Unit = self match case RefinedType(parent, name, rinfo) => rinfo match - case TypeAlias(lzy: LazyRef) if name == this.name => - if !lzy.completed then - lzy.update(tp) - else - throw CyclicReference(this) - case _ => - recur(parent) + case TypeAlias(lzy: LazyRef) if name == this.name => + if !lzy.completed then + lzy.update(tp) + else + throw CyclicReference(this) + case _ => + recur(parent) recur(owner.asClass.givenSelfType) end setAlias @@ -492,13 +476,12 @@ object SymDenotations { def fullNameSeparated(prefixKind: QualifiedNameKind, kind: QualifiedNameKind, name: Name)(using Context): Name = if (symbol == NoSymbol || isEffectiveRoot || kind == FlatName && is(PackageClass)) name - else { + else var filler = "" var encl = symbol - while (!encl.isClass && !encl.isPackageObject) { + while (!encl.isClass && !encl.isPackageObject) encl = encl.owner filler += "_$" - } var prefix = encl.fullNameSeparated(prefixKind) if (kind.separator == "$") // duplicate scalac's behavior: don't write a double '$$' for module class members. 
@@ -506,11 +489,9 @@ object SymDenotations { def qualify(n: SimpleName) = val qn = kind(prefix.toTermName, if (filler.isEmpty) n else termName(filler + n)) if kind == FlatName && !encl.is(JavaDefined) then qn.compactified else qn - val fn = name.replaceDeep { + val fn = name.replaceDeep: case n: SimpleName => qualify(n) - } if name.isTypeName then fn.toTypeName else fn.toTermName - } /** The encoded flat name of this denotation, where joined names are separated by `separator` characters. */ def flatName(using Context): Name = fullNameSeparated(FlatName) @@ -594,18 +575,17 @@ object SymDenotations { /** Make denotation not exist. * @pre `isCompleting` is false, or this is a ModuleCompleter or SymbolLoader */ - final def markAbsent()(using Context): Unit = { + final def markAbsent()(using Context): Unit = if (isCompleting) assert(myInfo.isInstanceOf[ModuleCompleter | SymbolLoader], s"Illegal call to `markAbsent()` while completing $this using completer $myInfo") myInfo = NoType - } /** Is symbol known to not exist? * @param canForce If this is true, the info may be forced to avoid a false-negative result */ @tailrec - final def isAbsent(canForce: Boolean = true)(using Context): Boolean = myInfo match { + final def isAbsent(canForce: Boolean = true)(using Context): Boolean = myInfo match case myInfo: ModuleCompleter => // Instead of completing the ModuleCompleter, we can check whether // the module class is absent, which might require less completions. @@ -619,7 +599,6 @@ object SymDenotations { (myInfo `eq` NoType) || is(Invisible) && ctx.isTyper || is(ModuleVal, butNot = Package) && moduleClass.isAbsent(canForce) - } /** Is this symbol the root class or its companion object? */ final def isRoot: Boolean = @@ -711,24 +690,22 @@ object SymDenotations { * TODO: Find a more robust way to characterize self symbols, maybe by * spending a Flag on them? 
*/ - final def isSelfSym(using Context): Boolean = owner.infoOrCompleter match { + final def isSelfSym(using Context): Boolean = owner.infoOrCompleter match case ClassInfo(_, _, _, _, selfInfo) => selfInfo == symbol || selfInfo.isInstanceOf[Type] && name == nme.WILDCARD case _ => false - } /** Is this definition contained in `boundary`? * Same as `ownersIterator contains boundary` but more efficient. */ - final def isContainedIn(boundary: Symbol)(using Context): Boolean = { + final def isContainedIn(boundary: Symbol)(using Context): Boolean = def recur(sym: Symbol): Boolean = if (sym eq boundary) true else if (sym eq NoSymbol) false else if (sym.is(PackageClass) && !boundary.is(PackageClass)) false else recur(sym.owner) recur(symbol) - } final def isProperlyContainedIn(boundary: Symbol)(using Context): Boolean = symbol != boundary && isContainedIn(boundary) @@ -781,10 +758,9 @@ object SymDenotations { * However, a stable member might not yet be initialized (if it is an object or anyhow lazy). * So the first call to a stable member might fail and/or produce side effects. */ - final def isStableMember(using Context): Boolean = { + final def isStableMember(using Context): Boolean = def isUnstableValue = isOneOf(UnstableValueFlags) || info.isInstanceOf[ExprType] || isAllOf(InlineParam) isType || is(StableRealizable) || exists && !isUnstableValue - } /** Is this a denotation of a real class that does not have - either direct or inherited - * initialization code? @@ -873,7 +849,7 @@ object SymDenotations { * As a side effect, drop Local flags of members that are not accessed via the ThisType * of their owner. */ - final def isAccessibleFrom(pre: Type, superAccess: Boolean = false, whyNot: StringBuffer | Null = null)(using Context): Boolean = { + final def isAccessibleFrom(pre: Type, superAccess: Boolean = false, whyNot: StringBuffer | Null = null)(using Context): Boolean = /** Are we inside definition of `boundary`? 
* If this symbol is Java defined, package structure is interpreted to be flat. @@ -883,22 +859,20 @@ object SymDenotations { && !(is(JavaDefined) && boundary.is(PackageClass) && ctx.owner.enclosingPackageClass != boundary) /** Are we within definition of linked class of `boundary`? */ - def accessWithinLinked(boundary: Symbol) = { + def accessWithinLinked(boundary: Symbol) = val linked = boundary.linkedClass (linked ne NoSymbol) && accessWithin(linked) - } /** Is `pre` the same as C.thisThis, where C is exactly the owner of this symbol, * or, if this symbol is protected, a subclass of the owner? */ - def isCorrectThisType(pre: Type): Boolean = pre match { + def isCorrectThisType(pre: Type): Boolean = pre match case pre: ThisType => (pre.cls eq owner) || this.is(Protected) && pre.cls.derivesFrom(owner) case pre: TermRef => pre.symbol.moduleClass == owner case _ => false - } /** Is protected access to target symbol permitted? */ def isProtectedAccessOK: Boolean = @@ -923,7 +897,7 @@ object SymDenotations { if pre eq NoPrefix then true else if isAbsent() then false - else { + else val boundary = accessBoundary(owner) ( boundary.isTerm @@ -944,8 +918,6 @@ object SymDenotations { || isProtectedAccessOK ) ) - } - } /** Do members of this symbol need translation via asSeenFrom when * accessed via prefix `pre`? @@ -955,11 +927,11 @@ object SymDenotations { case pre: ThisType => pre.sameThis(thisType) case _ => false !( this.isTerm - || this.isStaticOwner && !this.seesOpaques - || ctx.erasedTypes - || (pre eq NoPrefix) - || preIsThis - ) + || this.isStaticOwner && !this.seesOpaques + || ctx.erasedTypes + || (pre eq NoPrefix) + || preIsThis + ) /** Is this symbol concrete, or that symbol deferred? */ def isAsConcrete(that: Symbol)(using Context): Boolean = @@ -1043,14 +1015,13 @@ object SymDenotations { * because the Scala library does not always follow the right conventions. * Examples are: isWhole(), toInt(), toDouble() in BigDecimal, Numeric, RichInt, ScalaNumberProxy. 
*/ - def matchNullaryLoosely(using Context): Boolean = { + def matchNullaryLoosely(using Context): Boolean = def test(sym: Symbol) = sym.is(JavaDefined) || sym.owner == defn.AnyClass || sym == defn.Object_clone || sym.owner.is(Scala2x) this.exists && (test(symbol) || allOverriddenSymbols.exists(test)) - } // ------ access to related symbols --------------------------------- @@ -1067,62 +1038,55 @@ object SymDenotations { /** If this a module, return the corresponding class, if this is a module, return itself, * otherwise NoSymbol */ - final def moduleClass(using Context): Symbol = { - def notFound = { + final def moduleClass(using Context): Symbol = + def notFound = if (Config.showCompletions) println(s"missing module class for $name: $myInfo") NoSymbol - } if (this.is(ModuleVal)) - myInfo match { + myInfo match case info: TypeRef => info.symbol case ExprType(info: TypeRef) => info.symbol // needed after uncurry, when module terms might be accessor defs case info: LazyType => info.moduleClass case t: MethodType => - t.resultType match { + t.resultType match case info: TypeRef => info.symbol case _ => notFound - } case _ => notFound - } else if (this.is(ModuleClass)) symbol else NoSymbol - } /** If this a module class, return the corresponding module, if this is a module, return itself, * otherwise NoSymbol */ final def sourceModule(using Context): Symbol = if (this.is(ModuleClass)) - myInfo match { + myInfo match case ClassInfo(_, _, _, _, selfType) => - def sourceOfSelf(tp: TypeOrSymbol): Symbol = (tp: @unchecked) match { + def sourceOfSelf(tp: TypeOrSymbol): Symbol = (tp: @unchecked) match case tp: TermRef => tp.symbol case tp: Symbol => sourceOfSelf(tp.info) case tp: RefinedType => sourceOfSelf(tp.parent) case tp: AnnotatedType => sourceOfSelf(tp.parent) case tp: ThisType => tp.cls - } sourceOfSelf(selfType) case info: LazyType => info.sourceModule case _ => NoSymbol - } else if (this.is(ModuleVal)) symbol else NoSymbol /** The field accessed by this getter or 
setter, or if it does not exist, the getter */ - def accessedFieldOrGetter(using Context): Symbol = { + def accessedFieldOrGetter(using Context): Symbol = val fieldName = if (isSetter) name.asTermName.getterName else name val d = owner.info.decl(fieldName) val field = d.suchThat(!_.is(Method)).symbol def getter = d.suchThat(_.info.isParameterless).symbol field orElse getter - } /** The field accessed by a getter or setter, or * if it does not exists, the getter of a setter, or @@ -1132,15 +1096,13 @@ object SymDenotations { if (is(Accessor)) accessedFieldOrGetter orElse symbol else symbol /** The chain of owners of this denotation, starting with the denoting symbol itself */ - final def ownersIterator(using Context): Iterator[Symbol] = new Iterator[Symbol] { + final def ownersIterator(using Context): Iterator[Symbol] = new Iterator[Symbol]: private var current = symbol def hasNext = current.exists - def next: Symbol = { + def next: Symbol = val result = current current = current.owner result - } - } /** If this is a weak owner, its owner, otherwise the denoting symbol. */ final def skipWeakOwner(using Context): Symbol = @@ -1160,8 +1122,8 @@ object SymDenotations { * Note, that as packages have ClassSymbols, top level classes will have an `enclosingClass` * with Package flag set. 
*/ - final def enclosingClass(using Context): Symbol = { - def enclClass(sym: Symbol, skip: Boolean): Symbol = { + final def enclosingClass(using Context): Symbol = + def enclClass(sym: Symbol, skip: Boolean): Symbol = def newSkip = sym.is(JavaStaticTerm) if (!sym.exists) NoSymbol @@ -1169,9 +1131,7 @@ object SymDenotations { if (skip) enclClass(sym.owner, newSkip) else sym else enclClass(sym.owner, skip || newSkip) - } enclClass(symbol, false) - } /** Skips symbol that are not owned by a class */ def skipLocalOwners(using Context): Symbol = @@ -1207,10 +1167,9 @@ object SymDenotations { || isClass && hasAnnotation(defn.TransparentTraitAnnot) /** The class containing this denotation which has the given effective name. */ - final def enclosingClassNamed(name: Name)(using Context): Symbol = { + final def enclosingClassNamed(name: Name)(using Context): Symbol = val cls = enclosingClass if (cls.effectiveName == name || !cls.exists) cls else cls.owner.enclosingClassNamed(name) - } /** The closest enclosing method containing this definition. * A local dummy owner is mapped to the primary constructor of the class. @@ -1232,14 +1191,13 @@ object SymDenotations { /** The top-level class containing this denotation, * except for a toplevel module, where its module class is returned. */ - final def topLevelClass(using Context): Symbol = { + final def topLevelClass(using Context): Symbol = @tailrec def topLevel(d: SymDenotation): Symbol = if (d.isTopLevelClass) d.symbol else topLevel(d.owner) val sym = topLevel(this) if (sym.isClass) sym else sym.moduleClass - } final def isTopLevelClass(using Context): Boolean = !this.exists || this.isEffectiveRoot || this.is(PackageClass) || this.owner.is(PackageClass) @@ -1330,15 +1288,13 @@ object SymDenotations { /** The alias of an opaque type alias that's stored in the self type of the * containing object. 
*/ - def opaqueAlias(using Context): Type = { - def recur(tp: Type): Type = tp match { + def opaqueAlias(using Context): Type = + def recur(tp: Type): Type = tp match case RefinedType(parent, rname, TypeAlias(alias)) => if rname == name then alias.stripLazyRef else recur(parent) case _ => NoType - } recur(owner.asClass.givenSelfType) - } /** The non-private symbol whose name and type matches the type of this symbol * in the given class. @@ -1349,21 +1305,19 @@ object SymDenotations { * * site: Subtype of both inClass and C */ - final def matchingDecl(inClass: Symbol, site: Type)(using Context): Symbol = { + final def matchingDecl(inClass: Symbol, site: Type)(using Context): Symbol = var denot = inClass.info.nonPrivateDecl(name) if (denot.isTerm) // types of the same name always match denot = denot.matchingDenotation(site, site.memberInfo(symbol), symbol.targetName) denot.symbol - } /** The non-private member of `site` whose name and type matches the type of this symbol */ - final def matchingMember(site: Type)(using Context): Symbol = { + final def matchingMember(site: Type)(using Context): Symbol = var denot = site.nonPrivateMember(name) if (denot.isTerm) // types of the same name always match denot = denot.matchingDenotation(site, site.memberInfo(symbol), symbol.targetName) denot.symbol - } /** If false, this symbol cannot possibly participate in an override, * either as overrider or overridee. @@ -1388,13 +1342,12 @@ object SymDenotations { else overriddenFromType(owner.info) /** Equivalent to `allOverriddenSymbols.headOption.getOrElse(NoSymbol)` but more efficient. */ - final def nextOverriddenSymbol(using Context): Symbol = { + final def nextOverriddenSymbol(using Context): Symbol = val overridden = allOverriddenSymbols if (overridden.hasNext) overridden.next else NoSymbol - } /** Returns all matching symbols defined in parents of the selftype. 
*/ final def extendedOverriddenSymbols(using Context): Iterator[Symbol] = @@ -1402,10 +1355,9 @@ object SymDenotations { else overriddenFromType(owner.asClass.classInfo.selfType) private def overriddenFromType(tp: Type)(using Context): Iterator[Symbol] = - tp.baseClasses match { + tp.baseClasses match case _ :: inherited => inherited.iterator.map(overriddenSymbol(_)).filter(_.exists) case Nil => Iterator.empty - } /** The symbol overriding this symbol in given subclass `inClass`. * @@ -1419,17 +1371,15 @@ object SymDenotations { * seen from class `base`. This symbol is always concrete. * pre: `this.owner` is in the base class sequence of `base`. */ - final def superSymbolIn(base: Symbol)(using Context): Symbol = { - @tailrec def loop(bcs: List[ClassSymbol]): Symbol = bcs match { + final def superSymbolIn(base: Symbol)(using Context): Symbol = + @tailrec def loop(bcs: List[ClassSymbol]): Symbol = bcs match case bc :: bcs1 => val sym = matchingDecl(bcs.head, base.thisType) .suchThat(alt => !alt.is(Deferred)).symbol if (sym.exists) sym else loop(bcs.tail) case _ => NoSymbol - } loop(base.info.baseClasses.dropWhile(owner != _).tail) - } /** A member of class `base` is incomplete if * (1) it is declared deferred or @@ -1438,10 +1388,9 @@ object SymDenotations { */ @tailrec final def isIncompleteIn(base: Symbol)(using Context): Boolean = this.is(Deferred) || - this.is(AbsOverride) && { + this.is(AbsOverride) `&&`: val supersym = superSymbolIn(base) supersym == NoSymbol || supersym.isIncompleteIn(base) - } /** The class or term symbol up to which this symbol is accessible, * or RootClass if it is public. As java protected statics are @@ -1465,18 +1414,16 @@ object SymDenotations { /** The current declaration in this symbol's class owner that has the same name * as this one, and, if there are several, also has the same signature. 
*/ - def currentSymbol(using Context): Symbol = { + def currentSymbol(using Context): Symbol = val candidates = owner.info.decls.lookupAll(name) def test(sym: Symbol): Symbol = if (sym == symbol || sym.signature == signature) sym else if (candidates.hasNext) test(candidates.next) else NoSymbol - if (candidates.hasNext) { + if (candidates.hasNext) val sym = candidates.next if (candidates.hasNext) test(sym) else sym - } else NoSymbol - } // ----- type-related ------------------------------------------------ @@ -1508,7 +1455,7 @@ object SymDenotations { * This is required to avoid owner crash in ExplicitOuter. * See tests/pos/i10769.scala */ - def reachableTypeRef(using Context) = + def reachableTypeRef(using Context) = TypeRef(owner.reachablePrefix, symbol) /** The reachable typeRef with wildcard arguments for each type parameter */ @@ -1586,7 +1533,7 @@ object SymDenotations { def debugString: String = toString + "#" + symbol.id // !!! DEBUG - def hasSkolems(tp: Type): Boolean = tp match { + def hasSkolems(tp: Type): Boolean = tp match case tp: SkolemType => true case tp: NamedType => hasSkolems(tp.prefix) case tp: RefinedType => hasSkolems(tp.parent) || hasSkolems(tp.refinedInfo) @@ -1599,7 +1546,6 @@ object SymDenotations { case tp: AndOrType => hasSkolems(tp.tp1) || hasSkolems(tp.tp2) case tp: AnnotatedType => hasSkolems(tp.parent) case _ => false - } def assertNoSkolems(tp: Type): Unit = if (!this.isSkolem) @@ -1623,7 +1569,7 @@ object SymDenotations { privateWithin: Symbol | Null = null, annotations: List[Annotation] | Null = null, rawParamss: List[List[Symbol]] | Null = null)( - using Context): SymDenotation = { + using Context): SymDenotation = // simulate default parameters, while also passing implicit context ctx to the default values val initFlags1 = (if (initFlags != UndefinedFlags) initFlags else this.flags) val info1 = if (info != null) info else this.info @@ -1637,7 +1583,6 @@ object SymDenotations { d.rawParamss = rawParamss1 d.registeredCompanion = 
registeredCompanion d - } /** Copy mamberNames and baseData caches from given denotation, provided * they are valid at given `phase`. @@ -1648,14 +1593,12 @@ object SymDenotations { * @param completersMatter if `true`, consider parents changed if `info1` or `info2 `is a type completer */ protected def changedClassParents(info1: Type | Null, info2: Type | Null, completersMatter: Boolean): Boolean = - info2 match { + info2 match case info2: ClassInfo => - info1 match { + info1 match case info1: ClassInfo => info1.declaredParents ne info2.declaredParents case _ => completersMatter - } case _ => completersMatter - } override def initial: SymDenotation = super.initial.asSymDenotation @@ -1772,7 +1715,6 @@ object SymDenotations { /** Same as `sealedStrictDescendants` but prepends this symbol as well. */ final def sealedDescendants(using Context): List[Symbol] = this.symbol :: sealedStrictDescendants - } /** The contents of a class definition during a period */ @@ -1783,7 +1725,7 @@ object SymDenotations { initFlags: FlagSet, initInfo: Type, initPrivateWithin: Symbol) - extends SymDenotation(symbol, maybeOwner, name, initFlags, initInfo, initPrivateWithin) { + extends SymDenotation(symbol, maybeOwner, name, initFlags, initInfo, initPrivateWithin): import util.EqHashMap @@ -1803,35 +1745,29 @@ object SymDenotations { private var baseDataCache: BaseData = BaseData.None private var memberNamesCache: MemberNames = MemberNames.None - private def memberCache(using Context): EqHashMap[Name, PreDenotation] = { - if (myMemberCachePeriod != ctx.period) { + private def memberCache(using Context): EqHashMap[Name, PreDenotation] = + if (myMemberCachePeriod != ctx.period) myMemberCache = EqHashMap() myMemberCachePeriod = ctx.period - } myMemberCache.nn - } - private def baseTypeCache(using Context): BaseTypeMap = { + private def baseTypeCache(using Context): BaseTypeMap = if !currentHasSameBaseTypesAs(myBaseTypeCachePeriod) then myBaseTypeCache = new BaseTypeMap() myBaseTypeCachePeriod 
= ctx.period myBaseTypeCache.nn - } - private def invalidateBaseDataCache() = { + private def invalidateBaseDataCache() = baseDataCache.invalidate() baseDataCache = BaseData.None - } - private def invalidateMemberNamesCache() = { + private def invalidateMemberNamesCache() = memberNamesCache.invalidate() memberNamesCache = MemberNames.None - } - def invalidateBaseTypeCache(): Unit = { + def invalidateBaseTypeCache(): Unit = myBaseTypeCache = null myBaseTypeCachePeriod = Nowhere - } def invalidateMemberCaches()(using Context): Unit = myMemberCachePeriod = Nowhere @@ -1845,18 +1781,15 @@ object SymDenotations { val outerCache = sym.owner.owner.asClass.classDenot.myMemberCache if outerCache != null then outerCache.remove(sym.name) - override def copyCaches(from: SymDenotation, phase: Phase)(using Context): this.type = { - from match { + override def copyCaches(from: SymDenotation, phase: Phase)(using Context): this.type = + from match case from: ClassDenotation => if (from.memberNamesCache.isValidAt(phase)) memberNamesCache = from.memberNamesCache - if (from.baseDataCache.isValidAt(phase)) { + if (from.baseDataCache.isValidAt(phase)) baseDataCache = from.baseDataCache myBaseTypeCache = from.baseTypeCache - } case _ => - } this - } // ----- denotation fields and accessors ------------------------------ @@ -1879,28 +1812,24 @@ object SymDenotations { sym.is(TypeParam) && sym.owner == symbol).asInstanceOf[List[TypeSymbol]] /** The type parameters of this class */ - override final def typeParams(using Context): List[TypeSymbol] = { + override final def typeParams(using Context): List[TypeSymbol] = if (myTypeParams == null) myTypeParams = if (ctx.erasedTypes || is(Module)) Nil // fast return for modules to avoid scanning package decls - else { + else val di = initial if (this ne di) di.typeParams - else infoOrCompleter match { + else infoOrCompleter match case info: TypeParamsCompleter => info.completerTypeParams(symbol) case _ => typeParamsFromDecls - } - } myTypeParams.nn 
- } - override protected[dotc] final def info_=(tp: Type): Unit = { + override protected[dotc] final def info_=(tp: Type): Unit = if (changedClassParents(infoOrCompleter, tp, completersMatter = true)) invalidateBaseDataCache() invalidateMemberNamesCache() myTypeParams = null // changing the info might change decls, and with it typeParams super.info_=(tp) - } /** The types of the parent classes. */ def parentTypes(using Context): List[Type] = info match @@ -1921,10 +1850,9 @@ object SymDenotations { /** The explicitly given self type (self types of modules are assumed to be * explcitly given here). */ - def givenSelfType(using Context): Type = classInfo.selfInfo match { + def givenSelfType(using Context): Type = classInfo.selfInfo match case tp: Type => tp case self: Symbol => self.info - } // ------ class-specific operations ----------------------------------- @@ -1935,30 +1863,26 @@ object SymDenotations { * - for a module class `m`: A term ref to m's source module. * - for all other classes `c` with owner `o`: ThisType(TypeRef(o.thisType, c)) */ - override def thisType(using Context): Type = { + override def thisType(using Context): Type = if (myThisType == null) myThisType = computeThisType myThisType.nn - } - private def computeThisType(using Context): Type = { + private def computeThisType(using Context): Type = val cls = symbol.asType val pre = if (this.is(Package)) NoPrefix else owner.thisType ThisType.raw(TypeRef(pre, cls)) - } private var myTypeRef: TypeRef | Null = null - override def typeRef(using Context): TypeRef = { + override def typeRef(using Context): TypeRef = if (myTypeRef == null) myTypeRef = super.typeRef myTypeRef.nn - } override def appliedRef(using Context): Type = classInfo.appliedRef - private def baseData(implicit onBehalf: BaseData, ctx: Context): (List[ClassSymbol], BaseClassSet) = { + private def baseData(implicit onBehalf: BaseData, ctx: Context): (List[ClassSymbol], BaseClassSet) = if (!baseDataCache.isValid) baseDataCache = 
BaseData.newCache() baseDataCache(this) - } /** The base classes of this class in linearization order, * with the class itself as first element. @@ -1975,25 +1899,22 @@ object SymDenotations { private def baseClassSet(implicit onBehalf: BaseData, ctx: Context): BaseClassSet = baseData._2 - def computeBaseData(implicit onBehalf: BaseData, ctx: Context): (List[ClassSymbol], BaseClassSet) = { + def computeBaseData(implicit onBehalf: BaseData, ctx: Context): (List[ClassSymbol], BaseClassSet) = def emptyParentsExpected = is(Package) || (symbol == defn.AnyClass) || ctx.erasedTypes && (symbol == defn.ObjectClass) val parents = parentTypes if (parents.isEmpty && !emptyParentsExpected) onBehalf.signalProvisional() val builder = new BaseDataBuilder - def traverse(parents: List[Type]): Unit = parents match { + def traverse(parents: List[Type]): Unit = parents match case p :: parents1 => - p.classSymbol match { + p.classSymbol match case pcls: ClassSymbol => builder.addAll(pcls.baseClasses) case _ => assert(isRefinementClass || p.isError || ctx.mode.is(Mode.Interactive), s"$this has non-class parent: $p") - } traverse(parents1) case nil => - } traverse(parents) (classSymbol :: builder.baseClasses, builder.baseClassSet) - } final override def derivesFrom(base: Symbol)(using Context): Boolean = !isAbsent() && @@ -2045,18 +1966,15 @@ object SymDenotations { * @param scope The scope in which symbol should be entered. * If this is EmptyScope, the scope is `decls`. 
*/ - def enter(sym: Symbol, scope: Scope = EmptyScope)(using Context): Unit = { - val mscope = scope match { + def enter(sym: Symbol, scope: Scope = EmptyScope)(using Context): Unit = + val mscope = scope match case scope: MutableScope => scope case _ => unforcedDecls.openForMutations - } - if (proceedWithEnter(sym, mscope)) { + if (proceedWithEnter(sym, mscope)) enterNoReplace(sym, mscope) val nxt = this.nextInRun if (nxt.validFor.code > this.validFor.code) this.nextInRun.asSymDenotation.asClass.enter(sym) - } - } /** Enter a symbol in given `scope` without potentially replacing the old copy. */ def enterNoReplace(sym: Symbol, scope: MutableScope)(using Context): Unit = @@ -2067,37 +1985,33 @@ object SymDenotations { * If `prev` is not defined in current class, do nothing. * @pre `prev` and `replacement` have the same name. */ - def replace(prev: Symbol, replacement: Symbol)(using Context): Unit = { + def replace(prev: Symbol, replacement: Symbol)(using Context): Unit = unforcedDecls.openForMutations.replace(prev, replacement) if (myMemberCache != null) myMemberCache.uncheckedNN.remove(replacement.name) - } /** Delete symbol from current scope. * Note: We require that this does not happen after the first time * someone does a findMember on a subclass. */ - def delete(sym: Symbol)(using Context): Unit = { + def delete(sym: Symbol)(using Context): Unit = val scope = info.decls.openForMutations scope.unlink(sym, sym.name) if sym.name != sym.originalName then scope.unlink(sym, sym.originalName) if (myMemberCache != null) myMemberCache.uncheckedNN.remove(sym.name) if (!sym.flagsUNSAFE.is(Private)) invalidateMemberNamesCache() - } /** Make sure the type parameters of this class appear in the order given * by `typeParams` in the scope of the class. Reorder definitions in scope if necessary. 
*/ - def ensureTypeParamsInCorrectOrder()(using Context): Unit = { + def ensureTypeParamsInCorrectOrder()(using Context): Unit = val tparams = typeParams - if (!ctx.erasedTypes && !typeParamsFromDecls.corresponds(tparams)(_.name == _.name)) { + if (!ctx.erasedTypes && !typeParamsFromDecls.corresponds(tparams)(_.name == _.name)) val decls = info.decls val decls1 = newScope for (tparam <- typeParams) decls1.enter(decls.lookup(tparam.name)) for (sym <- decls) if (!tparams.contains(sym)) decls1.enter(sym) info = classInfo.derivedClassInfo(decls = decls1) myTypeParams = null - } - } /** All members of this class that have the given name. * The elements of the returned pre-denotation all @@ -2170,59 +2084,52 @@ object SymDenotations { membersNamedNoShadowingBasedOnFlags(name, required, excluded).asSeenFrom(pre).toDenot(pre) /** Compute tp.baseType(this) */ - final def baseTypeOf(tp: Type)(using Context): Type = { + final def baseTypeOf(tp: Type)(using Context): Type = val btrCache = baseTypeCache def inCache(tp: Type) = tp match case tp: CachedType => btrCache.contains(tp) case _ => false - def record(tp: CachedType, baseTp: Type) = { - if (Stats.monitored) { + def record(tp: CachedType, baseTp: Type) = + if (Stats.monitored) Stats.record("basetype cache entries") if (!baseTp.exists) Stats.record("basetype cache NoTypes") - } if (!tp.isProvisional && !CapturingType.isUncachable(tp)) btrCache(tp) = baseTp else btrCache.remove(tp) // Remove any potential sentinel value - } - def ensureAcyclic(baseTp: Type) = { + def ensureAcyclic(baseTp: Type) = if (baseTp `eq` NoPrefix) throw CyclicReference(this) baseTp - } - def recur(tp: Type): Type = try { - tp match { + def recur(tp: Type): Type = try + tp match case tp: CachedType => val baseTp: Type | Null = btrCache.lookup(tp) if (baseTp != null) return ensureAcyclic(baseTp) case _ => - } - if (Stats.monitored) { + if (Stats.monitored) Stats.record("computeBaseType, total") Stats.record(s"computeBaseType, ${tp.getClass}") - } val 
normed = tp.tryNormalize if (normed.exists) return recur(normed) - tp match { + tp match case tp @ TypeRef(prefix, _) => - def foldGlb(bt: Type, ps: List[Type]): Type = ps match { + def foldGlb(bt: Type, ps: List[Type]): Type = ps match case p :: ps1 => foldGlb(bt & recur(p), ps1) case _ => bt - } - def computeTypeRef = { + def computeTypeRef = btrCache(tp) = NoPrefix val tpSym = tp.symbol - tpSym.denot match { + tpSym.denot match case clsd: ClassDenotation => - def isOwnThis = prefix match { + def isOwnThis = prefix match case prefix: ThisType => prefix.cls `eq` clsd.owner case NoPrefix => true case _ => false - } val baseTp = if (tpSym eq symbol) tp @@ -2243,12 +2150,10 @@ object SymDenotations { else btrCache.remove(tp) baseTp - } - } computeTypeRef case tp @ AppliedType(tycon, args) => - def computeApplied = { + def computeApplied = btrCache(tp) = NoPrefix val baseTp = if (tycon.typeSymbol eq symbol) && !tycon.isLambdaSub then tp @@ -2259,7 +2164,6 @@ object SymDenotations { recur(tycon).substApprox(tparams, args) record(tp, baseTp) baseTp - } computeApplied case tp: TypeParamRef => // uncachable, since baseType depends on context bounds @@ -2269,16 +2173,14 @@ object SymDenotations { tp.derivedCapturingType(recur(parent), refs) case tp: TypeProxy => - def computeTypeProxy = { + def computeTypeProxy = val superTp = tp.superType val baseTp = recur(superTp) - tp match { + tp match case tp: CachedType if baseTp.exists && inCache(superTp) => record(tp, baseTp) case _ => - } baseTp - } computeTypeProxy case tp: AndOrType => @@ -2310,35 +2212,29 @@ object SymDenotations { case _ => NoType - } - } - catch { + catch case ex: Throwable => tp match case tp: CachedType => btrCache.remove(tp) case _ => throw ex - } - trace.onDebug(s"$tp.baseType($this)") { + trace.onDebug(s"$tp.baseType($this)"): Stats.record("baseTypeOf") recur(tp) - } - } def memberNames(keepOnly: NameFilter)(implicit onBehalf: MemberNames, ctx: Context): Set[Name] = if (this.is(PackageClass) || 
!Config.cacheMemberNames) computeMemberNames(keepOnly) // don't cache package member names; they might change - else { + else if (!memberNamesCache.isValid) memberNamesCache = MemberNames.newCache() memberNamesCache(keepOnly, this) - } - def computeMemberNames(keepOnly: NameFilter)(implicit onBehalf: MemberNames, ctx: Context): Set[Name] = { + def computeMemberNames(keepOnly: NameFilter)(implicit onBehalf: MemberNames, ctx: Context): Set[Name] = var names = Set[Name]() def maybeAdd(name: Name) = if (keepOnly(thisType, name)) names += name - try { + try for ptype <- parentTypes do ptype.classSymbol match case pcls: ClassSymbol => @@ -2352,32 +2248,26 @@ object SymDenotations { else info.decls.iterator for (sym <- ownSyms) maybeAdd(sym.name) names - } - catch { + catch case ex: Throwable => handleRecursive("member names", i"of $this", ex) - } - } - override final def fullNameSeparated(kind: QualifiedNameKind)(using Context): Name = { + override final def fullNameSeparated(kind: QualifiedNameKind)(using Context): Name = val cached = fullNameCache(kind) if (cached != null) cached - else { + else val fn = super.fullNameSeparated(kind) fullNameCache = fullNameCache.updated(kind, fn) fn - } - } // to avoid overloading ambiguities override def fullName(using Context): Name = super.fullName - override def primaryConstructor(using Context): Symbol = { + override def primaryConstructor(using Context): Symbol = def constrNamed(cname: TermName) = info.decls.denotsNamed(cname).last.symbol // denotsNamed returns Symbols in reverse order of occurrence if (this.is(Package)) NoSymbol else constrNamed(nme.CONSTRUCTOR).orElse(constrNamed(nme.TRAIT_CONSTRUCTOR)) - } /** The term parameter accessors of this class. * Both getters and setters are returned in this list. 
@@ -2394,16 +2284,14 @@ object SymDenotations { */ def ensureFreshScopeAfter(phase: DenotTransformer)(using Context): Unit = if (ctx.phaseId != phase.next.id) atPhase(phase.next)(ensureFreshScopeAfter(phase)) - else { - val prevClassInfo = atPhase(phase) { + else + val prevClassInfo = atPhase(phase): current.asInstanceOf[ClassDenotation].classInfo - } val ClassInfo(pre, _, ps, decls, selfInfo) = classInfo if (prevClassInfo.decls eq decls) copySymDenotation(info = ClassInfo(pre, classSymbol, ps, decls.cloneScope, selfInfo)) .copyCaches(this, phase.next) .installAfter(phase) - } private var myCompanion: Symbol = NoSymbol @@ -2421,7 +2309,6 @@ object SymDenotations { override def registeredCompanion_=(c: Symbol) = myCompanion = c - } /** The denotation of a package class. * It overrides ClassDenotation to take account of package objects when looking for members @@ -2433,43 +2320,37 @@ object SymDenotations { initFlags: FlagSet, initInfo: Type, initPrivateWithin: Symbol) - extends ClassDenotation(symbol, ownerIfExists, name, initFlags, initInfo, initPrivateWithin) { + extends ClassDenotation(symbol, ownerIfExists, name, initFlags, initInfo, initPrivateWithin): private var packageObjsCache: List[ClassDenotation] = _ private var packageObjsRunId: RunId = NoRunId private var ambiguityWarningIssued: Boolean = false /** The package objects in this class */ - def packageObjs(using Context): List[ClassDenotation] = { - if (packageObjsRunId != ctx.runId) { + def packageObjs(using Context): List[ClassDenotation] = + if (packageObjsRunId != ctx.runId) packageObjsRunId = ctx.runId packageObjsCache = Nil // break cycle in case we are looking for package object itself - packageObjsCache = { + packageObjsCache = val pkgObjBuf = new mutable.ListBuffer[ClassDenotation] - for (sym <- info.decls) { // don't use filter, since that loads classes with `$`s in their name + for (sym <- info.decls) // don't use filter, since that loads classes with `$`s in their name val denot = 
sym.lastKnownDenotation // don't use `sym.denot`, as this brings forward classes too early if (denot.isType && denot.name.isPackageObjectName) pkgObjBuf += sym.asClass.classDenot - } pkgObjBuf.toList - } - } packageObjsCache - } /** The package object (as a term symbol) in this package that might contain * `sym` as a member. */ - def packageObjFor(sym: Symbol)(using Context): Symbol = { + def packageObjFor(sym: Symbol)(using Context): Symbol = val owner = sym.maybeOwner if (owner.is(Package)) NoSymbol else if (owner.isPackageObject) owner.sourceModule else // owner could be class inherited by package object (until package object inheritance is removed) - packageObjs.find(_.name == packageTypeName) match { + packageObjs.find(_.name == packageTypeName) match case Some(pobj) => pobj.sourceModule case _ => NoSymbol - } - } /** Looks in both the package object and the package for members. The precise algorithm * is as follows: @@ -2516,7 +2397,7 @@ object SymDenotations { // if a symbol does not have an associated file, assume it is defined // in the current run anyway. This is true for packages, and also can happen for pickling and // from-tasty tests that generate a fresh symbol and then re-use it in the next run. 
- ) + ) if compiledNow.exists then compiledNow else val assocFiles = multi.aggregate(d => Set(d.symbol.associatedFile.nn), _ union _) @@ -2561,43 +2442,37 @@ object SymDenotations { end computeMembersNamed /** The union of the member names of the package and the package object */ - override def memberNames(keepOnly: NameFilter)(implicit onBehalf: MemberNames, ctx: Context): Set[Name] = { - def recur(pobjs: List[ClassDenotation], acc: Set[Name]): Set[Name] = pobjs match { + override def memberNames(keepOnly: NameFilter)(implicit onBehalf: MemberNames, ctx: Context): Set[Name] = + def recur(pobjs: List[ClassDenotation], acc: Set[Name]): Set[Name] = pobjs match case pcls :: pobjs1 => recur(pobjs1, acc.union(pcls.memberNames(keepOnly))) case nil => acc - } recur(packageObjs, super.memberNames(keepOnly)) - } /** If another symbol with the same name is entered, unlink it. * If symbol is a package object, invalidate the packageObj cache. * @return `sym` is not already entered */ - override def proceedWithEnter(sym: Symbol, mscope: MutableScope)(using Context): Boolean = { + override def proceedWithEnter(sym: Symbol, mscope: MutableScope)(using Context): Boolean = val entry = mscope.lookupEntry(sym.name) - if (entry != null) { + if (entry != null) if (entry.sym == sym) return false mscope.unlink(entry) - } if (sym.name.isPackageObjectName) packageObjsRunId = NoRunId true - } /** Unlink all package members defined in `file` in a previous run. */ - def unlinkFromFile(file: AbstractFile)(using Context): Unit = { + def unlinkFromFile(file: AbstractFile)(using Context): Unit = val scope = unforcedDecls.openForMutations for (sym <- scope.toList.iterator) // We need to be careful to not force the denotation of `sym` here, // otherwise it will be brought forward to the current run. 
if (sym.defRunId != ctx.runId && sym.isClass && sym.asClass.assocFile == file) scope.unlink(sym, sym.lastKnownDenotation.name) - } - } @sharable object NoDenotation - extends SymDenotation(NoSymbol, NoSymbol, "".toTermName, Permanent, NoType) { + extends SymDenotation(NoSymbol, NoSymbol, "".toTermName, Permanent, NoType): override def isTerm: Boolean = false override def exists: Boolean = false override def owner: Symbol = throw new AssertionError("NoDenotation.owner") @@ -2614,7 +2489,6 @@ object SymDenotations { NoSymbol.denot = this validFor = Period.allInRun(NoRunId) - } /** Can a private symbol with given name and flags be inferred to be local, * if all references to such symbols are via `this`? @@ -2636,7 +2510,7 @@ object SymDenotations { name: Name, initFlags: FlagSet, initInfo: Type, - initPrivateWithin: Symbol = NoSymbol)(using Context): SymDenotation = { + initPrivateWithin: Symbol = NoSymbol)(using Context): SymDenotation = val result = if (symbol.isClass) if (initFlags.is(Package)) new PackageClassDenotation(symbol, owner, name, initFlags, initInfo, initPrivateWithin) @@ -2644,11 +2518,10 @@ object SymDenotations { else new SymDenotation(symbol, owner, name, initFlags, initInfo, initPrivateWithin) result.validFor = currentStablePeriod result - } def stillValid(denot: SymDenotation)(using Context): Boolean = if (denot.isOneOf(ValidForeverFlags) || denot.isRefinementClass || denot.isImport) true - else { + else val initial = denot.initial val firstPhaseId = initial.validFor.firstPhaseId.max(typerPhase.id) @@ -2658,7 +2531,6 @@ object SymDenotations { atPhase(firstPhaseId)(stillValidInOwner(initial)) else stillValidInOwner(denot) - } private[SymDenotations] def stillValidInOwner(denot: SymDenotation)(using Context): Boolean = try val owner = denot.maybeOwner.denot @@ -2674,13 +2546,12 @@ object SymDenotations { catch case ex: StaleSymbol => false /** Explain why symbol is invalid; used for debugging only */ - def traceInvalid(denot: Denotation)(using 
Context): Boolean = { + def traceInvalid(denot: Denotation)(using Context): Boolean = def show(d: Denotation) = s"$d#${d.symbol.id}" - def explain(msg: String) = { + def explain(msg: String) = println(s"${show(denot)} is invalid at ${ctx.period} because $msg") false - } - denot match { + denot match case denot: SymDenotation => def explainSym(msg: String) = explain(s"$msg\ndefined = ${denot.definedPeriodsString}") if (denot.isOneOf(ValidForeverFlags) || denot.isRefinementClass) true @@ -2688,20 +2559,16 @@ object SymDenotations { val initial = denot.initial if ((initial ne denot) || ctx.phaseId != initial.validFor.firstPhaseId) atPhase(initial.validFor.firstPhaseId)(traceInvalid(initial)) - else try { + else try val owner = denot.owner.denot if (!traceInvalid(owner)) explainSym("owner is invalid") else if (!owner.isClass || owner.isRefinementClass || denot.isSelfSym) true else if (owner.unforcedDecls.lookupAll(denot.name) contains denot.symbol) true else explainSym(s"decls of ${show(owner)} are ${owner.unforcedDecls.lookupAll(denot.name).toList}, do not contain ${denot.symbol}") - } - catch { + catch case ex: StaleSymbol => explainSym(s"$ex was thrown") - } case _ => explain("denotation is not a SymDenotation") - } - } /** Configurable: Accept stale symbol with warning if in IDE * Always accept stale symbols when testing pickling. @@ -2712,10 +2579,9 @@ object SymDenotations { /** Possibly accept stale symbol with warning if in IDE */ def acceptStale(denot: SingleDenotation)(using Context): Boolean = - staleOK && { + staleOK `&&`: report.debugwarn(denot.staleSymbolMsg) true - } // ---- Completion -------------------------------------------------------- @@ -2774,15 +2640,13 @@ object SymDenotations { /** A subtrait of LazyTypes where completerTypeParams yields a List[TypeSymbol], which * should be completed independently of the info. 
*/ - trait TypeParamsCompleter extends LazyType { + trait TypeParamsCompleter extends LazyType: override def completerTypeParams(sym: Symbol)(using Context): List[TypeSymbol] = unsupported("completerTypeParams") // should be abstract, but Scala-2 will then compute the wrong type for it - } /** A missing completer */ - trait NoCompleter extends LazyType { + trait NoCompleter extends LazyType: def complete(denot: SymDenotation)(using Context): Unit = unsupported("complete") - } @sharable object NoCompleter extends NoCompleter @@ -2791,9 +2655,9 @@ object SymDenotations { * Completion of modules is always completion of the underlying * module class, followed by copying the relevant fields to the module. */ - class ModuleCompleter(_moduleClass: ClassSymbol) extends LazyType { + class ModuleCompleter(_moduleClass: ClassSymbol) extends LazyType: override def moduleClass(using Context): ClassSymbol = _moduleClass - def complete(denot: SymDenotation)(using Context): Unit = { + def complete(denot: SymDenotation)(using Context): Unit = val from = moduleClass.denot.asClass denot.setFlag(from.flags.toTermFlags & RetainedModuleValFlags) denot.annotations = from.annotations filter (_.appliesToModule) @@ -2803,35 +2667,29 @@ object SymDenotations { // class and the module. 
denot.info = moduleClass.typeRef denot.setPrivateWithin(from.privateWithin) - } - } /** A completer for missing references */ - class StubInfo() extends LazyType { + class StubInfo() extends LazyType: - def initializeToDefaults(denot: SymDenotation, errMsg: Message)(using Context): Unit = { - denot.info = denot match { + def initializeToDefaults(denot: SymDenotation, errMsg: Message)(using Context): Unit = + denot.info = denot match case denot: ClassDenotation => ClassInfo(denot.owner.thisType, denot.classSymbol, Nil, EmptyScope) case _ => ErrorType(errMsg) - } denot.setPrivateWithin(NoSymbol) - } - def complete(denot: SymDenotation)(using Context): Unit = { + def complete(denot: SymDenotation)(using Context): Unit = val sym = denot.symbol val errMsg = BadSymbolicReference(denot) report.error(errMsg, sym.srcPos) if (ctx.debug) throw new scala.Error() initializeToDefaults(denot, errMsg) - } - } // ---- Caches for inherited info ----------------------------------------- /** Base trait for caches that keep info dependent on inherited classes */ - trait InheritedCache { + trait InheritedCache: /** Is the cache valid in current period? */ def isValid(using Context): Boolean @@ -2841,57 +2699,47 @@ object SymDenotations { /** Render invalid this cache and all caches that depend on it */ def invalidate(): Unit - } /** A cache for sets of member names, indexed by a NameFilter */ - trait MemberNames extends InheritedCache { + trait MemberNames extends InheritedCache: def apply(keepOnly: NameFilter, clsd: ClassDenotation) (implicit onBehalf: MemberNames, ctx: Context): Set[Name] - } - object MemberNames { - implicit val None: MemberNames = new InvalidCache with MemberNames { + object MemberNames: + implicit val None: MemberNames = new InvalidCache with MemberNames: def apply(keepOnly: NameFilter, clsd: ClassDenotation)(implicit onBehalf: MemberNames, ctx: Context) = ??? 
- } def newCache()(using Context): MemberNames = new MemberNamesImpl(ctx.period) - } /** A cache for baseclasses, as a sequence in linearization order and as a set that * can be queried efficiently for containment. */ - trait BaseData extends InheritedCache { + trait BaseData extends InheritedCache: def apply(clsd: ClassDenotation) (implicit onBehalf: BaseData, ctx: Context): (List[ClassSymbol], BaseClassSet) def signalProvisional(): Unit - } - object BaseData { - implicit val None: BaseData = new InvalidCache with BaseData { + object BaseData: + implicit val None: BaseData = new InvalidCache with BaseData: def apply(clsd: ClassDenotation)(implicit onBehalf: BaseData, ctx: Context) = ??? def signalProvisional() = () - } def newCache()(using Context): BaseData = new BaseDataImpl(ctx.period) - } - private abstract class InheritedCacheImpl(val createdAt: Period) extends InheritedCache { + private abstract class InheritedCacheImpl(val createdAt: Period) extends InheritedCache: protected def sameGroup(p1: Phase, p2: Phase): Boolean private var dependent: WeakHashMap[InheritedCache, Unit] | Null = null private var checkedPeriod: Period = Nowhere - protected def invalidateDependents() = { + protected def invalidateDependents() = import scala.language.unsafeNulls - if (dependent != null) { + if (dependent != null) val it = dependent.keySet.iterator() while (it.hasNext()) it.next().invalidate() - } dependent = null - } - protected def addDependent(dep: InheritedCache) = { + protected def addDependent(dep: InheritedCache) = if (dependent == null) dependent = new WeakHashMap dependent.nn.put(dep, ()) - } def isValidAt(phase: Phase)(using Context) = checkedPeriod.code == ctx.period.code || @@ -2899,15 +2747,13 @@ object SymDenotations { createdAt.phaseId < unfusedPhases.length && sameGroup(unfusedPhases(createdAt.phaseId), phase) && { checkedPeriod = ctx.period; true } - } - private class InvalidCache extends InheritedCache { + private class InvalidCache extends 
InheritedCache: def isValid(using Context) = false def isValidAt(phase: Phase)(using Context) = false def invalidate(): Unit = () - } - private class MemberNamesImpl(createdAt: Period) extends InheritedCacheImpl(createdAt) with MemberNames { + private class MemberNamesImpl(createdAt: Period) extends InheritedCacheImpl(createdAt) with MemberNames: private var cache: SimpleIdentityMap[NameFilter, Set[Name]] | Null = SimpleIdentityMap.empty final def isValid(using Context): Boolean = @@ -2922,31 +2768,27 @@ object SymDenotations { def invalidate(): Unit = if (cache != null) if (locked) cache = SimpleIdentityMap.empty - else { + else cache = null invalidateDependents() - } - def apply(keepOnly: NameFilter, clsd: ClassDenotation)(implicit onBehalf: MemberNames, ctx: Context) = { + def apply(keepOnly: NameFilter, clsd: ClassDenotation)(implicit onBehalf: MemberNames, ctx: Context) = assert(isValid) val cached = cache.nn(keepOnly) try if (cached != null) cached - else { + else locked = true val computed = try clsd.computeMemberNames(keepOnly)(this, ctx) finally locked = false cache = cache.nn.updated(keepOnly, computed) computed - } finally addDependent(onBehalf) - } def sameGroup(p1: Phase, p2: Phase) = p1.sameMembersStartId == p2.sameMembersStartId - } - private class BaseDataImpl(createdAt: Period) extends InheritedCacheImpl(createdAt) with BaseData { + private class BaseDataImpl(createdAt: Period) extends InheritedCacheImpl(createdAt) with BaseData: private var cache: (List[ClassSymbol], BaseClassSet) | Null = null private var valid = true @@ -2960,20 +2802,19 @@ object SymDenotations { // So basedata caches can become invalid only if the run changes. 
def invalidate(): Unit = - if (valid && !locked) { + if (valid && !locked) cache = null valid = false invalidateDependents() - } def signalProvisional() = provisional = true def apply(clsd: ClassDenotation)(implicit onBehalf: BaseData, ctx: Context) - : (List[ClassSymbol], BaseClassSet) = { + : (List[ClassSymbol], BaseClassSet) = assert(isValid) try if (cache != null) cache.uncheckedNN - else { + else if (locked) throw CyclicReference(clsd) locked = true provisional = false @@ -2983,76 +2824,60 @@ object SymDenotations { if (!provisional) cache = computed else onBehalf.signalProvisional() computed - } finally addDependent(onBehalf) - } def sameGroup(p1: Phase, p2: Phase) = p1.sameParentsStartId == p2.sameParentsStartId - } - class BaseClassSet(val classIds: Array[Int]) extends AnyVal { - def contains(sym: Symbol, limit: Int): Boolean = { + class BaseClassSet(val classIds: Array[Int]) extends AnyVal: + def contains(sym: Symbol, limit: Int): Boolean = val id = sym.id var i = 0 while (i < limit && classIds(i) != id) i += 1 - i < limit && { - if (i > 0) { + i < limit `&&`: + if (i > 0) val t = classIds(i) classIds(i) = classIds(i - 1) classIds(i - 1) = t - } true - } - } def contains(sym: Symbol): Boolean = contains(sym, classIds.length) - } - object BaseClassSet { + object BaseClassSet: def apply(bcs: List[ClassSymbol]): BaseClassSet = new BaseClassSet(bcs.toArray.map(_.id)) - } /** A class to combine base data from parent types */ - class BaseDataBuilder { + class BaseDataBuilder: private var classes: List[ClassSymbol] = Nil private var classIds = new Array[Int](32) private var length = 0 - private def resize(size: Int) = { + private def resize(size: Int) = val classIds1 = new Array[Int](size) System.arraycopy(classIds, 0, classIds1, 0, classIds.length min size) classIds = classIds1 - } - private def add(sym: Symbol): Unit = { + private def add(sym: Symbol): Unit = if (length == classIds.length) resize(length * 2) classIds(length) = sym.id length += 1 - } - def 
addAll(bcs: List[ClassSymbol]): this.type = { + def addAll(bcs: List[ClassSymbol]): this.type = val len = length - bcs match { + bcs match case bc :: bcs1 => addAll(bcs1) - if (!new BaseClassSet(classIds).contains(bc, len)) { + if (!new BaseClassSet(classIds).contains(bc, len)) add(bc) classes = bc :: classes - } case nil => - } this - } - def baseClassSet: BaseClassSet = { + def baseClassSet: BaseClassSet = if (length != classIds.length) resize(length) new BaseClassSet(classIds) - } def baseClasses: List[ClassSymbol] = classes - } private val packageTypeName = ModuleClassName(nme.PACKAGE).toTypeName @sharable private var indent = 0 // for completions printing -} diff --git a/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala b/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala index 9eb67b468cfa..a93d1a8b0121 100644 --- a/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala +++ b/compiler/src/dotty/tools/dotc/core/SymbolLoaders.scala @@ -25,7 +25,7 @@ import parsing.JavaParsers.OutlineJavaParser import parsing.Parsers.OutlineParser -object SymbolLoaders { +object SymbolLoaders: import ast.untpd._ /** A marker trait for a completer that replaces the original @@ -35,41 +35,38 @@ object SymbolLoaders { private def enterNew( owner: Symbol, member: Symbol, - completer: SymbolLoader, scope: Scope = EmptyScope)(using Context): Symbol = { + completer: SymbolLoader, scope: Scope = EmptyScope)(using Context): Symbol = val comesFromScan = completer.isInstanceOf[SourcefileLoader] assert(comesFromScan || scope.lookup(member.name) == NoSymbol, s"${owner.fullName}.${member.name} already has a symbol") owner.asClass.enter(member, scope) member - } /** Enter class with given `name` into scope of `owner`. 
*/ def enterClass( owner: Symbol, name: PreName, completer: SymbolLoader, - flags: FlagSet = EmptyFlags, scope: Scope = EmptyScope)(using Context): Symbol = { + flags: FlagSet = EmptyFlags, scope: Scope = EmptyScope)(using Context): Symbol = val cls = newClassSymbol(owner, name.toTypeName.unmangleClassName.decode, flags, completer, assocFile = completer.sourceFileOrNull) enterNew(owner, cls, completer, scope) - } /** Enter module with given `name` into scope of `owner`. */ def enterModule( owner: Symbol, name: PreName, completer: SymbolLoader, - modFlags: FlagSet = EmptyFlags, clsFlags: FlagSet = EmptyFlags, scope: Scope = EmptyScope)(using Context): Symbol = { + modFlags: FlagSet = EmptyFlags, clsFlags: FlagSet = EmptyFlags, scope: Scope = EmptyScope)(using Context): Symbol = val module = newModuleSymbol( owner, name.toTermName.decode, modFlags, clsFlags, (module, _) => completer.proxy.withDecls(newScope).withSourceModule(module), assocFile = completer.sourceFileOrNull) enterNew(owner, module, completer, scope) enterNew(owner, module.moduleClass, completer, scope) - } /** Enter package with given `name` into scope of `owner` * and give them `completer` as type. */ - def enterPackage(owner: Symbol, pname: TermName, completer: (TermSymbol, ClassSymbol) => PackageLoader)(using Context): Symbol = { + def enterPackage(owner: Symbol, pname: TermName, completer: (TermSymbol, ClassSymbol) => PackageLoader)(using Context): Symbol = val preExisting = owner.info.decls lookup pname if (preExisting != NoSymbol) // Some jars (often, obfuscated ones) include a package and @@ -77,37 +74,33 @@ object SymbolLoaders { // offer a setting to resolve the conflict one way or the other. // This was motivated by the desire to use YourKit probes, which // require yjp.jar at runtime. See SI-2089. 
- if (ctx.settings.YtermConflict.value == "package" || ctx.mode.is(Mode.Interactive)) { + if (ctx.settings.YtermConflict.value == "package" || ctx.mode.is(Mode.Interactive)) report.warning( s"Resolving package/object name conflict in favor of package ${preExisting.fullName}. The object will be inaccessible.") owner.asClass.delete(preExisting) - } - else if (ctx.settings.YtermConflict.value == "object") { + else if (ctx.settings.YtermConflict.value == "object") report.warning( s"Resolving package/object name conflict in favor of object ${preExisting.fullName}. The package will be inaccessible.") return NoSymbol - } else throw TypeError( em"""$owner contains object and package with same name: $pname |one of them needs to be removed from classpath""") newModuleSymbol(owner, pname, PackageCreationFlags, PackageCreationFlags, completer).entered - } /** Enter class and module with given `name` into scope of `owner` * and give them `completer` as type. */ def enterClassAndModule( owner: Symbol, name: PreName, completer: SymbolLoader, - flags: FlagSet = EmptyFlags, scope: Scope = EmptyScope)(using Context): Unit = { + flags: FlagSet = EmptyFlags, scope: Scope = EmptyScope)(using Context): Unit = val clazz = enterClass(owner, name, completer, flags, scope) val module = enterModule( owner, name, completer, modFlags = flags.toTermFlags & RetainedModuleValFlags, clsFlags = flags.toTypeFlags & RetainedModuleClassFlags, scope = scope) - } /** Enter all toplevel classes and objects in file `src` into package `owner`, provided * they are in the right package. 
Issue a warning if a class or object is in the wrong @@ -122,15 +115,14 @@ object SymbolLoaders { val completer = new SourcefileLoader(src) val filePath = owner.ownersIterator.takeWhile(!_.isRoot).map(_.name.toTermName).toList - def addPrefix(pid: RefTree, path: List[TermName]): List[TermName] = pid match { + def addPrefix(pid: RefTree, path: List[TermName]): List[TermName] = pid match case Ident(name: TermName) => name :: path case Select(qual: RefTree, name: TermName) => name :: addPrefix(qual, path) case _ => path - } - def enterScanned(unit: CompilationUnit)(using Context) = { + def enterScanned(unit: CompilationUnit)(using Context) = - def checkPathMatches(path: List[TermName], what: String, tree: NameTree): Boolean = { + def checkPathMatches(path: List[TermName], what: String, tree: NameTree): Boolean = val ok = filePath == path if (!ok) report.warning(i"""$what ${tree.name} is in the wrong directory. @@ -138,7 +130,6 @@ object SymbolLoaders { |But it is found in directory ${filePath.reverse.mkString(File.separator.nn)}""", tree.srcPos.focus) ok - } /** Run the subset of desugaring necessary to record the correct symbols */ def simpleDesugar(tree: Tree): Tree = tree match @@ -149,7 +140,7 @@ object SymbolLoaders { case _ => tree - def traverse(tree: Tree, path: List[TermName]): Unit = simpleDesugar(tree) match { + def traverse(tree: Tree, path: List[TermName]): Unit = simpleDesugar(tree) match case tree @ PackageDef(pid, body) => val path1 = addPrefix(pid, path) for (stat <- body) traverse(stat, path1) @@ -162,13 +153,11 @@ object SymbolLoaders { if (checkPathMatches(path, "object", tree)) enterModule(owner, tree.name, completer, scope = scope) case _ => - } traverse( if (unit.isJava) new OutlineJavaParser(unit.source).parse() else new OutlineParser(unit.source).parse(), Nil) - } val unit = CompilationUnit(ctx.getSource(src)) enterScanned(unit)(using ctx.fresh.setCompilationUnit(unit)) @@ -187,7 +176,7 @@ object SymbolLoaders { /** Initialize toplevel class 
and module symbols in `owner` from class path representation `classRep` */ def initializeFromClassPath(owner: Symbol, classRep: ClassRepresentation)(using Context): Unit = - ((classRep.binary, classRep.source): @unchecked) match { + ((classRep.binary, classRep.source): @unchecked) match case (Some(bin), Some(src)) if needCompile(bin, src) && !binaryOnly(owner, nameOf(classRep)) => if (ctx.settings.verbose.value) report.inform("[symloader] picked up newer source file for " + src.path) enterToplevelsFromSource(owner, nameOf(classRep), src) @@ -196,7 +185,6 @@ object SymbolLoaders { enterToplevelsFromSource(owner, nameOf(classRep), src) case (Some(bin), _) => enterClassAndModule(owner, nameOf(classRep), ctx.platform.newClassLoader(bin)) - } def needCompile(bin: AbstractFile, src: AbstractFile): Boolean = src.lastModified >= bin.lastModified @@ -206,7 +194,7 @@ object SymbolLoaders { /** Load contents of a package */ - class PackageLoader(_sourceModule: TermSymbol, classPath: ClassPath) extends SymbolLoader { + class PackageLoader(_sourceModule: TermSymbol, classPath: ClassPath) extends SymbolLoader: override def sourceFileOrNull: AbstractFile | Null = null override def sourceModule(using Context): TermSymbol = _sourceModule def description(using Context): String = "package loader " + sourceModule.fullName @@ -218,51 +206,46 @@ object SymbolLoaders { /** The scope of a package. This is different from a normal scope * in that names of scope entries are kept in mangled form. 
*/ - final class PackageScope extends MutableScope(0) { + final class PackageScope extends MutableScope(0): override def newScopeEntry(name: Name, sym: Symbol)(using Context): ScopeEntry = super.newScopeEntry(name.mangled, sym) - override def lookupEntry(name: Name)(using Context): ScopeEntry | Null = { + override def lookupEntry(name: Name)(using Context): ScopeEntry | Null = val mangled = name.mangled val e = super.lookupEntry(mangled) if (e != null) e - else if (isFlatName(mangled.toSimpleName) && enterFlatClasses.isDefined) { + else if (isFlatName(mangled.toSimpleName) && enterFlatClasses.isDefined) Stats.record("package scopes with flatnames entered") enterFlatClasses.get() lookupEntry(name) - } else e - } override def ensureComplete()(using Context): Unit = for (enter <- enterFlatClasses) enter() override def newScopeLikeThis(): PackageScope = new PackageScope - } private[core] val currentDecls: MutableScope = new PackageScope() - private def isFlatName(name: SimpleName): Boolean = { + private def isFlatName(name: SimpleName): Boolean = val idx = name.lastIndexOf('$', name.length - 2) idx >= 0 && (idx + str.TOPLEVEL_SUFFIX.length + 1 != name.length || !name.endsWith(str.TOPLEVEL_SUFFIX)) - } /** Name of class contains `$`, excepted names ending in `$package` */ - def hasFlatName(classRep: ClassRepresentation): Boolean = { + def hasFlatName(classRep: ClassRepresentation): Boolean = val name = classRep.name val idx = name.lastIndexOf('$', name.length - 2) idx >= 0 && (idx + str.TOPLEVEL_SUFFIX.length + 1 != name.length || !name.endsWith(str.TOPLEVEL_SUFFIX)) - } def maybeModuleClass(classRep: ClassRepresentation): Boolean = classRep.name.last == '$' - private def enterClasses(root: SymDenotation, packageName: String, flat: Boolean)(using Context) = { + private def enterClasses(root: SymDenotation, packageName: String, flat: Boolean)(using Context) = def isAbsent(classRep: ClassRepresentation) = !root.unforcedDecls.lookup(classRep.name.toTypeName).exists - if 
(!root.isRoot) { + if (!root.isRoot) val classReps = classPath.list(packageName).classesAndSources for (classRep <- classReps) @@ -273,10 +256,8 @@ object SymbolLoaders { if (maybeModuleClass(classRep) && hasFlatName(classRep) == flat && isAbsent(classRep)) initializeFromClassPath(root.symbol, classRep) - } - } - def doComplete(root: SymDenotation)(using Context): Unit = { + def doComplete(root: SymDenotation)(using Context): Unit = assert(root is PackageClass, root) val pre = root.owner.thisType root.info = ClassInfo(pre, root.symbol.asClass, Nil, currentDecls, pre select sourceModule) @@ -291,7 +272,7 @@ object SymbolLoaders { } enterClasses(root, packageName, flat = false) if (!root.isEmptyPackage) - for (pkg <- classPath.packages(packageName)) { + for (pkg <- classPath.packages(packageName)) val fullName = pkg.name val name = if (packageName.isEmpty) fullName @@ -299,10 +280,6 @@ object SymbolLoaders { enterPackage(root.symbol, name.toTermName, (module, modcls) => new PackageLoader(module, classPath)) - } - } - } -} /** A lazy type that completes itself by calling parameter doComplete. * Any linked modules/classes or module classes are also initialized. 
@@ -321,28 +298,24 @@ abstract class SymbolLoader extends LazyType { self => /** A proxy to this loader that keeps the doComplete operation * but provides fresh slots for scope/sourceModule/moduleClass */ - def proxy: SymbolLoader = new SymbolLoader { + def proxy: SymbolLoader = new SymbolLoader: export self.{doComplete, sourceFileOrNull} def description(using Context): String = s"proxy to ${self.description}" - } - override def complete(root: SymDenotation)(using Context): Unit = { - def signalError(ex: Exception): Unit = { + override def complete(root: SymDenotation)(using Context): Unit = + def signalError(ex: Exception): Unit = if (ctx.debug) ex.printStackTrace() val msg = ex.getMessage() report.error( if msg == null then em"i/o error while loading ${root.name}" else em"""error while loading ${root.name}, |$msg""") - } - try { + try val start = System.currentTimeMillis - trace.onDebug("loading") { + trace.onDebug("loading"): doComplete(root) - } report.informTime("loaded " + description, start) - } - catch { + catch case ex: InterruptedException => throw ex case ex: ClosedByInterruptException => @@ -355,8 +328,7 @@ abstract class SymbolLoader extends LazyType { self => case NonFatal(ex) => println(s"exception caught when loading $root: $ex") throw ex - } - finally { + finally def postProcess(denot: SymDenotation, other: Symbol) = if !denot.isCompleted && !denot.completer.isInstanceOf[SymbolLoaders.SecondCompleter] then @@ -370,11 +342,9 @@ abstract class SymbolLoader extends LazyType { self => postProcess(root, other) if (!root.isRoot) postProcess(other, root.symbol) - } - } - protected def rootDenots(rootDenot: ClassDenotation)(using Context): (ClassDenotation, ClassDenotation) = { - val linkedDenot = rootDenot.scalacLinkedClass.denot match { + protected def rootDenots(rootDenot: ClassDenotation)(using Context): (ClassDenotation, ClassDenotation) = + val linkedDenot = rootDenot.scalacLinkedClass.denot match case d: ClassDenotation => d case d => // this can 
happen if the companion if shadowed by a val or type @@ -391,13 +361,11 @@ abstract class SymbolLoader extends LazyType { self => rootDenot.owner, rootDenot.name.toTermName, Synthetic, Synthetic, (module, _) => NoLoader().withDecls(newScope).withSourceModule(module)) .moduleClass.denot.asClass - } if (rootDenot.is(ModuleClass)) (linkedDenot, rootDenot) else (rootDenot, linkedDenot) - } } -class ClassfileLoader(val classfile: AbstractFile) extends SymbolLoader { +class ClassfileLoader(val classfile: AbstractFile) extends SymbolLoader: override def sourceFileOrNull: AbstractFile | Null = classfile @@ -406,36 +374,31 @@ class ClassfileLoader(val classfile: AbstractFile) extends SymbolLoader { override def doComplete(root: SymDenotation)(using Context): Unit = load(root) - def load(root: SymDenotation)(using Context): Unit = { + def load(root: SymDenotation)(using Context): Unit = val (classRoot, moduleRoot) = rootDenots(root.asClass) val classfileParser = new ClassfileParser(classfile, classRoot, moduleRoot)(ctx) val result = classfileParser.run() if (mayLoadTreesFromTasty) - result match { + result match case Some(unpickler: tasty.DottyUnpickler) => classRoot.classSymbol.rootTreeOrProvider = unpickler moduleRoot.classSymbol.rootTreeOrProvider = unpickler case _ => - } - } private def mayLoadTreesFromTasty(using Context): Boolean = ctx.settings.YretainTrees.value || ctx.settings.fromTasty.value -} -class SourcefileLoader(val srcfile: AbstractFile) extends SymbolLoader { +class SourcefileLoader(val srcfile: AbstractFile) extends SymbolLoader: def description(using Context): String = "source file " + srcfile.toString override def sourceFileOrNull: AbstractFile | Null = srcfile def doComplete(root: SymDenotation)(using Context): Unit = ctx.run.nn.lateCompile(srcfile, typeCheck = ctx.settings.YretainTrees.value) -} /** A NoCompleter which is also a SymbolLoader. 
*/ -class NoLoader extends SymbolLoader with NoCompleter { +class NoLoader extends SymbolLoader with NoCompleter: def description(using Context): String = "NoLoader" override def sourceFileOrNull: AbstractFile | Null = null override def complete(root: SymDenotation)(using Context): Unit = super[NoCompleter].complete(root) def doComplete(root: SymDenotation)(using Context): Unit = unsupported("doComplete") -} diff --git a/compiler/src/dotty/tools/dotc/core/Symbols.scala b/compiler/src/dotty/tools/dotc/core/Symbols.scala index 07ac2be90819..44aac107fa85 100644 --- a/compiler/src/dotty/tools/dotc/core/Symbols.scala +++ b/compiler/src/dotty/tools/dotc/core/Symbols.scala @@ -32,7 +32,7 @@ import util.{SourceFile, NoSource, Property, SourcePosition, SrcPos, EqHashMap} import scala.annotation.internal.sharable import config.Printers.typr -object Symbols { +object Symbols: implicit def eqSymbol: CanEqual[Symbol, Symbol] = CanEqual.derived @@ -44,7 +44,7 @@ object Symbols { * @param id A unique identifier of the symbol (unique per ContextBase) */ class Symbol private[Symbols] (private var myCoord: Coord, val id: Int, val nestingLevel: Int) - extends Designator, ParamInfo, SrcPos, printing.Showable { + extends Designator, ParamInfo, SrcPos, printing.Showable: type ThisName <: Name @@ -58,12 +58,11 @@ object Symbols { * * @pre coord == NoCoord */ - private[core] def coord_=(c: Coord): Unit = { + private[core] def coord_=(c: Coord): Unit = // assert(myCoord == NoCoord) // This assertion fails for CommentPickling test. // TODO: figure out what's wrong in the setup of CommentPicklingTest and re-enable assertion. myCoord = c - } private var myDefTree: Tree | Null = null @@ -94,33 +93,29 @@ object Symbols { /** Set the denotation of this symbol * `denot` should always be initialized when a new Symbol is created. 
*/ - private[core] def denot_=(d: SymDenotation): Unit = { + private[core] def denot_=(d: SymDenotation): Unit = util.Stats.record("Symbol.denot_=") lastDenot = d checkedPeriod = Nowhere - } /** The current denotation of this symbol */ - final def denot(using Context): SymDenotation = { + final def denot(using Context): SymDenotation = util.Stats.record("Symbol.denot") if checkedPeriod.code == ctx.period.code then lastDenot else computeDenot(lastDenot) - } - private def computeDenot(lastd: SymDenotation)(using Context): SymDenotation = { + private def computeDenot(lastd: SymDenotation)(using Context): SymDenotation = util.Stats.record("Symbol.computeDenot") val now = ctx.period checkedPeriod = now if (lastd.validFor contains now) lastd else recomputeDenot(lastd) - } /** Overridden in NoSymbol */ - protected def recomputeDenot(lastd: SymDenotation)(using Context): SymDenotation = { + protected def recomputeDenot(lastd: SymDenotation)(using Context): SymDenotation = util.Stats.record("Symbol.recomputeDenot") val newd = lastd.current.asInstanceOf[SymDenotation] lastDenot = newd newd - } /** The original denotation of this symbol, without forcing anything */ final def originDenotation: SymDenotation = @@ -165,14 +160,12 @@ object Symbols { (if (defRunId == ctx.runId) lastDenot else denot).isTerm final def isType(using Context): Boolean = (if (defRunId == ctx.runId) lastDenot else denot).isType - final def asTerm(using Context): TermSymbol = { + final def asTerm(using Context): TermSymbol = assert(isTerm, s"asTerm called on not-a-Term $this" ); asInstanceOf[TermSymbol] - } - final def asType(using Context): TypeSymbol = { + final def asType(using Context): TypeSymbol = assert(isType, s"asType called on not-a-Type $this"); asInstanceOf[TypeSymbol] - } final def isClass: Boolean = isInstanceOf[ClassSymbol] final def asClass: ClassSymbol = asInstanceOf[ClassSymbol] @@ -201,13 +194,11 @@ object Symbols { lastDenot.initial.isStatic /** This symbol entered into owner's scope 
(owner must be a class). */ - final def entered(using Context): this.type = { - if (this.owner.isClass) { + final def entered(using Context): this.type = + if (this.owner.isClass) this.owner.asClass.enter(this) if (this.is(Module)) this.owner.asClass.enter(this.moduleClass) - } this - } /** Enter this symbol in its class owner after given `phase`. Create a fresh * denotation for its owner class if the class does not already have one @@ -217,23 +208,20 @@ object Symbols { def enteredAfter(phase: DenotTransformer)(using Context): this.type = if ctx.phaseId != phase.next.id then atPhase(phase.next)(enteredAfter(phase)) - else this.owner match { + else this.owner match case owner: ClassSymbol => - if (owner.is(Package)) { + if (owner.is(Package)) denot.validFor |= InitialPeriod if (this.is(Module)) this.moduleClass.validFor |= InitialPeriod - } else owner.ensureFreshScopeAfter(phase) assert(isPrivate || phase.changesMembers, i"$this entered in $owner at undeclared phase $phase") entered case _ => this - } /** Remove symbol from scope of owning class */ - final def drop()(using Context): Unit = { + final def drop()(using Context): Unit = this.owner.asClass.delete(this) if (this.is(Module)) this.owner.asClass.delete(this.moduleClass) - } /** Remove symbol from scope of owning class after given `phase`. Create a fresh * denotation for its owner class if the class does not already have one that starts being valid after `phase`. 
@@ -242,12 +230,11 @@ object Symbols { def dropAfter(phase: DenotTransformer)(using Context): Unit = if ctx.phaseId != phase.next.id then atPhase(phase.next)(dropAfter(phase)) - else { + else assert (!this.owner.is(Package)) this.owner.asClass.ensureFreshScopeAfter(phase) assert(isPrivate || phase.changesMembers, i"$this deleted in ${this.owner} at undeclared phase $phase") drop() - } /** This symbol, if it exists, otherwise the result of evaluating `that` */ inline def orElse(inline that: Symbol)(using Context): Symbol = @@ -269,10 +256,9 @@ object Symbols { lastDenot.topLevelClass.associatedFile /** The class file from which this class was generated, null if not applicable. */ - final def binaryFile(using Context): AbstractFile | Null = { + final def binaryFile(using Context): AbstractFile | Null = val file = associatedFile if (file != null && file.extension == "class") file else null - } /** A trap to avoid calling x.symbol on something that is already a symbol. * This would be expanded to `toDenot(x).symbol` which is guaraneteed to be @@ -282,27 +268,23 @@ object Symbols { final def symbol(implicit ev: DontUseSymbolOnSymbol): Nothing = unsupported("symbol") type DontUseSymbolOnSymbol - final def source(using Context): SourceFile = { + final def source(using Context): SourceFile = def valid(src: SourceFile): SourceFile = if (src.exists && src.file.extension != "class") src else NoSource if (!denot.exists) NoSource else - valid(defTree.source) match { + valid(defTree.source) match case NoSource => - valid(denot.owner.source) match { + valid(denot.owner.source) match case NoSource => - this match { + this match case cls: ClassSymbol => valid(cls.sourceOfClass) case _ if denot.is(Module) => valid(denot.moduleClass.source) case _ => NoSource - } case src => src - } case src => src - } - } /** A symbol related to `sym` that is defined in source code. 
* @@ -313,13 +295,12 @@ object Symbols { this else if (denot.is(ModuleVal)) this.moduleClass.sourceSymbol // The module val always has a zero-extent position - else if (denot.is(Synthetic)) { + else if (denot.is(Synthetic)) val linked = denot.linkedClass if (linked.exists && !linked.is(Synthetic)) linked else denot.owner.sourceSymbol - } else if (denot.isPrimaryConstructor) denot.owner.sourceSymbol else this @@ -329,10 +310,9 @@ object Symbols { */ final def span: Span = if (coord.isSpan) coord.toSpan else NoSpan - final def sourcePos(using Context): SourcePosition = { + final def sourcePos(using Context): SourcePosition = val src = source (if (src.exists) src else ctx.source).atSpan(span) - } /** This positioned item, widened to `SrcPos`. Used to make clear we only need the * position, typically for error reporting. @@ -359,7 +339,7 @@ object Symbols { privateWithin: Symbol = this.privateWithin, coord: Coord = NoCoord, // Can be `= owner.coord` once we bootstrap associatedFile: AbstractFile | Null = null // Can be `= owner.associatedFile` once we bootstrap - ): Symbol = { + ): Symbol = val coord1 = if (coord == NoCoord) owner.coord else coord val associatedFile1 = if (associatedFile == null) owner.associatedFile else associatedFile @@ -367,7 +347,6 @@ object Symbols { newClassSymbol(owner, name.asTypeName, flags, _ => info, privateWithin, coord1, associatedFile1) else newSymbol(owner, name, flags, info, privateWithin, coord1) - } // -------- Printing -------------------------------------------------------- @@ -387,13 +366,12 @@ object Symbols { def showFullName(using Context): String = ctx.printer.fullNameString(this) override def hashCode(): Int = id // for debugging. 
- } type TermSymbol = Symbol { type ThisName = TermName } type TypeSymbol = Symbol { type ThisName = TypeName } class ClassSymbol private[Symbols] (coord: Coord, val assocFile: AbstractFile | Null, id: Int, nestingLevel: Int) - extends Symbol(coord, id, nestingLevel) { + extends Symbol(coord, id, nestingLevel): type ThisName = TypeName @@ -411,45 +389,38 @@ object Symbols { * For Tasty trees this means consulting whether the name table defines `id`. * For already loaded trees, we maintain the referenced ids in an attachment. */ - def rootTreeContaining(id: String)(using Context): Tree = { - denot.infoOrCompleter match { + def rootTreeContaining(id: String)(using Context): Tree = + denot.infoOrCompleter match case _: NoCompleter => case _ => denot.ensureCompleted() - } - myTree match { + myTree match case fn: TreeProvider => - if (id.isEmpty || fn.mightContain(id)) { + if (id.isEmpty || fn.mightContain(id)) val tree = fn.tree myTree = tree tree - } else tpd.EmptyTree case tree: Tree @ unchecked => if (id.isEmpty || mightContain(tree, id)) tree else tpd.EmptyTree - } - } def rootTreeOrProvider: TreeOrProvider = myTree private[dotc] def rootTreeOrProvider_=(t: TreeOrProvider)(using Context): Unit = myTree = t - private def mightContain(tree: Tree, id: String)(using Context): Boolean = { - val ids = tree.getAttachment(Ids) match { + private def mightContain(tree: Tree, id: String)(using Context): Boolean = + val ids = tree.getAttachment(Ids) match case Some(ids) => ids case None => val idSet = mutable.SortedSet[String]() - tree.foreachSubTree { + tree.foreachSubTree: case tree: tpd.NameTree if tree.name.toTermName.isInstanceOf[SimpleName] => idSet += tree.name.toString case _ => - } val ids = idSet.toArray tree.putAttachment(Ids, ids) ids - } ids.binarySearch(id) >= 0 - } /** The source or class file from which this class was generated, null if not applicable. 
*/ override def associatedFile(using Context): AbstractFile | Null = @@ -458,7 +429,7 @@ object Symbols { private var mySource: SourceFile = NoSource - final def sourceOfClass(using Context): SourceFile = { + final def sourceOfClass(using Context): SourceFile = if !mySource.exists && !denot.is(Package) then // this allows sources to be added in annotations after `sourceOfClass` is first called val file = associatedFile @@ -467,26 +438,22 @@ object Symbols { else mySource = defn.patchSource(this) if !mySource.exists then - mySource = atPhaseNoLater(flattenPhase) { + mySource = atPhaseNoLater(flattenPhase): denot.topLevelClass.unforcedAnnotation(defn.SourceFileAnnot) match case Some(sourceAnnot) => sourceAnnot.argumentConstant(0) match - case Some(Constant(path: String)) => ctx.getSource(path) - case none => NoSource + case Some(Constant(path: String)) => ctx.getSource(path) + case none => NoSource case none => NoSource - } mySource - } final def classDenot(using Context): ClassDenotation = denot.asInstanceOf[ClassDenotation] override protected def prefixString: String = "ClassSymbol" - } - @sharable object NoSymbol extends Symbol(NoCoord, 0, 0) { + @sharable object NoSymbol extends Symbol(NoCoord, 0, 0): override def associatedFile(using Context): AbstractFile | Null = NoSource.file override def recomputeDenot(lastd: SymDenotation)(using Context): SymDenotation = NoDenotation - } NoDenotation // force it in order to set `denot` field of NoSymbol @@ -516,12 +483,11 @@ object Symbols { info: Type, privateWithin: Symbol = NoSymbol, coord: Coord = NoCoord, - nestingLevel: Int = ctx.nestingLevel): Symbol { type ThisName = N } = { + nestingLevel: Int = ctx.nestingLevel): Symbol { type ThisName = N } = val sym = new Symbol(coord, ctx.base.nextSymId, nestingLevel).asInstanceOf[Symbol { type ThisName = N }] val denot = SymDenotation(sym, owner, name, flags, info, privateWithin) sym.denot = denot sym - } /** Create a class symbol from its non-info fields and a function * 
producing its info (the produced info may be lazy). @@ -534,12 +500,11 @@ object Symbols { privateWithin: Symbol = NoSymbol, coord: Coord = NoCoord, assocFile: AbstractFile | Null = null)(using Context): ClassSymbol - = { + = val cls = new ClassSymbol(coord, assocFile, ctx.base.nextSymId, ctx.nestingLevel) val denot = SymDenotation(cls, owner, name, flags, infoFn(cls), privateWithin) cls.denot = denot cls - } /** Create a class symbol from its non-info fields and the fields of its info. */ def newCompleteClassSymbol( @@ -568,16 +533,13 @@ object Symbols { selfInfo: Type = NoType, privateWithin: Symbol = NoSymbol, coord: Coord = NoCoord, - assocFile: AbstractFile | Null = null)(using Context): ClassSymbol = { - def completer = new LazyType { - def complete(denot: SymDenotation)(using Context): Unit = { + assocFile: AbstractFile | Null = null)(using Context): ClassSymbol = + def completer = new LazyType: + def complete(denot: SymDenotation)(using Context): Unit = val cls = denot.asClass.classSymbol val decls = newScope denot.info = ClassInfo(owner.thisType, cls, parentTypes.map(_.dealias), decls, selfInfo) - } - } newClassSymbol(owner, name, flags, completer, privateWithin, coord, assocFile) - } def newRefinedClassSymbol(coord: Coord = NoCoord)(using Context): ClassSymbol = newCompleteClassSymbol(ctx.owner, tpnme.REFINE_CLASS, NonMember, parents = Nil, newScope, coord = coord) @@ -595,7 +557,7 @@ object Symbols { privateWithin: Symbol = NoSymbol, coord: Coord = NoCoord, assocFile: AbstractFile | Null = null)(using Context): TermSymbol - = { + = val base = owner.thisType val modclsFlags = clsFlags | ModuleClassCreationFlags val modclsName = name.toTypeName.adjustIfModuleClass(modclsFlags) @@ -607,7 +569,6 @@ object Symbols { if (modcls.isCompleted) TypeRef(owner.thisType, modcls) else new ModuleCompleter(modcls) module - } /** Create a module symbol with associated module class * from its non-info fields and the fields of the module class info. 
@@ -642,19 +603,16 @@ object Symbols { decls: Scope, privateWithin: Symbol = NoSymbol, coord: Coord = NoCoord, - assocFile: AbstractFile | Null = null)(using Context): TermSymbol = { - def completer(module: Symbol) = new LazyType { - def complete(denot: SymDenotation)(using Context): Unit = { + assocFile: AbstractFile | Null = null)(using Context): TermSymbol = + def completer(module: Symbol) = new LazyType: + def complete(denot: SymDenotation)(using Context): Unit = val cls = denot.asClass.classSymbol val decls = newScope denot.info = ClassInfo(owner.thisType, cls, parentTypes.map(_.dealias), decls, TermRef(owner.thisType, module)) - } - } newModuleSymbol( owner, name, modFlags, clsFlags, (module, modcls) => completer(module), privateWithin, coord, assocFile) - } /** Create a package symbol with associated package class * from its non-info fields and a lazy type for loading the package's members. @@ -684,29 +642,26 @@ object Symbols { info: Type, span: Span, addToGadt: Boolean = true, - flags: FlagSet = EmptyFlags)(using Context): Symbol = { + flags: FlagSet = EmptyFlags)(using Context): Symbol = val sym = newSymbol(ctx.owner, name, Case | flags, info, coord = span) if (addToGadt && name.isTypeName) ctx.gadtState.addToConstraint(sym) sym - } /** Create a stub symbol that will issue a missing reference error * when attempted to be completed. */ - def newStubSymbol(owner: Symbol, name: Name, file: AbstractFile | Null = null)(using Context): Symbol = { + def newStubSymbol(owner: Symbol, name: Name, file: AbstractFile | Null = null)(using Context): Symbol = def stubCompleter = new StubInfo() val normalizedOwner = if (owner.is(ModuleVal)) owner.moduleClass else owner typr.println(s"creating stub for ${name.show}, owner = ${normalizedOwner.denot.debugString}, file = $file") typr.println(s"decls = ${normalizedOwner.unforcedDecls.toList.map(_.debugString).mkString("\n ")}") // !!! 
DEBUG //if (base.settings.debug.value) throw new Error() - val stub = name match { + val stub = name match case name: TermName => newModuleSymbol(normalizedOwner, name, EmptyFlags, EmptyFlags, stubCompleter, assocFile = file) case name: TypeName => newClassSymbol(normalizedOwner, name, EmptyFlags, stubCompleter, assocFile = file) - } stub - } /** Create the local template dummy of given class `cls`. * In a template @@ -764,22 +719,20 @@ object Symbols { owner: Symbol, names: List[TypeName], flags: FlagSet, - boundsFn: List[TypeRef] => List[Type])(using Context): List[TypeSymbol] = { + boundsFn: List[TypeRef] => List[Type])(using Context): List[TypeSymbol] = val tparamBuf = new mutable.ListBuffer[TypeSymbol] val trefBuf = new mutable.ListBuffer[TypeRef] - for (name <- names) { + for (name <- names) val tparam = newSymbol( owner, name, flags | owner.typeParamCreationFlags, NoType, coord = owner.coord) tparamBuf += tparam trefBuf += TypeRef(owner.thisType, tparam) - } val tparams = tparamBuf.toList val bounds = boundsFn(trefBuf.toList) for (tparam, bound) <- tparams.lazyZip(bounds) do tparam.info = bound tparams - } /** Create a new skolem symbol. This is not the same as SkolemType, even though the * motivation (create a singleton referencing to a type) is similar. @@ -787,11 +740,10 @@ object Symbols { def newSkolem(tp: Type)(using Context): TermSymbol = newSymbol(defn.RootClass, nme.SKOLEM, SyntheticArtifact | NonMember | Permanent, tp) - def newErrorSymbol(owner: Symbol, name: Name, msg: Message)(using Context): Symbol = { + def newErrorSymbol(owner: Symbol, name: Name, msg: Message)(using Context): Symbol = val errType = ErrorType(msg) newSymbol(owner, name, SyntheticArtifact, if (name.isTypeName) TypeAlias(errType) else errType) - } /** Map given symbols, subjecting their attributes to the mappings * defined in the given TreeTypeMap `ttmap`. 
@@ -803,7 +755,7 @@ object Symbols { (ttmap.mapType(sym.info) eq sym.info) && !(ttmap.oldOwners contains sym.owner)) && !mapAlways) originals - else { + else val copies: List[Symbol] = for (original <- originals) yield val odenot = original.denot original.copy( @@ -866,7 +818,6 @@ object Symbols { sealedCopy.annotations = sealedCopy.annotations.mapConserve(ttmap1.apply) copies - } /** Matches lists of term symbols, including the empty list. * All symbols in the list are assumed to be of the same kind. @@ -886,20 +837,17 @@ object Symbols { // ----- Locating predefined symbols ---------------------------------------- - def requiredPackage(path: PreName)(using Context): TermSymbol = { + def requiredPackage(path: PreName)(using Context): TermSymbol = val name = path.toTermName staticRef(name, isPackage = true).requiredSymbol("package", name)(_.is(Package)).asTerm - } def requiredPackageRef(path: PreName)(using Context): TermRef = requiredPackage(path).termRef - def requiredClass(path: PreName)(using Context): ClassSymbol = { + def requiredClass(path: PreName)(using Context): ClassSymbol = val name = path.toTypeName - staticRef(name).requiredSymbol("class", name)(_.isClass) match { + staticRef(name).requiredSymbol("class", name)(_.isClass) match case cls: ClassSymbol => cls case sym => defn.AnyClass - } - } def requiredClassRef(path: PreName)(using Context): TypeRef = requiredClass(path).typeRef @@ -923,10 +871,9 @@ object Symbols { staticRef(path.toTypeName, isPackage = true, generateStubs = false) .disambiguate(_ is PackageClass).symbol - def requiredModule(path: PreName)(using Context): TermSymbol = { + def requiredModule(path: PreName)(using Context): TermSymbol = val name = path.toTermName staticRef(name).requiredSymbol("object", name)(_.is(Module)).asTerm - } /** Get module symbol if the module is either defined in current compilation run * or present on classpath. Returns NoSymbol otherwise. 
@@ -937,10 +884,8 @@ object Symbols { def requiredModuleRef(path: PreName)(using Context): TermRef = requiredModule(path).termRef - def requiredMethod(path: PreName)(using Context): TermSymbol = { + def requiredMethod(path: PreName)(using Context): TermSymbol = val name = path.toTermName staticRef(name).requiredSymbol("method", name)(_.is(Method)).asTerm - } def requiredMethodRef(path: PreName)(using Context): TermRef = requiredMethod(path).termRef -} diff --git a/compiler/src/dotty/tools/dotc/core/TypeApplications.scala b/compiler/src/dotty/tools/dotc/core/TypeApplications.scala index 2e8aee4df96c..152536d74e30 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeApplications.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeApplications.scala @@ -15,15 +15,14 @@ import dotty.tools.dotc.config.Config import cc.boxedUnlessFun import dotty.tools.dotc.transform.TypeUtils.isErasedValueType -object TypeApplications { +object TypeApplications: type TypeParamInfo = ParamInfo.Of[TypeName] /** Assert type is not a TypeBounds instance and return it unchanged */ - def noBounds(tp: Type): Type = tp match { + def noBounds(tp: Type): Type = tp match case tp: TypeBounds => throw new AssertionError("no TypeBounds allowed") case _ => tp - } /** Extractor for * @@ -109,56 +108,50 @@ object TypeApplications { * result type. Using this mode, we can guarantee that `appliedTo` will never * produce a higher-kinded application with a type lambda as type constructor. 
*/ - class Reducer(tycon: TypeLambda, args: List[Type])(using Context) extends TypeMap { + class Reducer(tycon: TypeLambda, args: List[Type])(using Context) extends TypeMap: private var available = (0 until args.length).toSet var allReplaced: Boolean = true def hasWildcardArg(p: TypeParamRef): Boolean = p.binder == tycon && isBounds(args(p.paramNum)) def canReduceWildcard(p: TypeParamRef): Boolean = !ctx.mode.is(Mode.AllowLambdaWildcardApply) || available.contains(p.paramNum) - def atNestedLevel(op: => Type): Type = { + def atNestedLevel(op: => Type): Type = val saved = available available = Set() try op finally available = saved - } // If this is a reference to a reducable type parameter corresponding to a // wildcard argument, return the wildcard argument, otherwise apply recursively. - def applyArg(arg: Type): Type = arg match { + def applyArg(arg: Type): Type = arg match case p: TypeParamRef if hasWildcardArg(p) && canReduceWildcard(p) => available -= p.paramNum args(p.paramNum) case _ => atNestedLevel(apply(arg)) - } - def apply(t: Type): Type = t match { + def apply(t: Type): Type = t match case t @ AppliedType(tycon, args1) if tycon.typeSymbol.isClass => t.derivedAppliedType(apply(tycon), args1.mapConserve(applyArg)) case t @ RefinedType(parent, name, TypeAlias(info)) => t.derivedRefinedType(apply(parent), name, applyArg(info).bounds) case p: TypeParamRef if p.binder == tycon => - args(p.paramNum) match { + args(p.paramNum) match case TypeBounds(lo, hi) => if (ctx.mode.is(Mode.AllowLambdaWildcardApply)) { allReplaced = false; p } else if (variance < 0) lo else hi case arg => arg - } case _: TypeBounds | _: AppliedType => atNestedLevel(mapOver(t)) case _ => mapOver(t) - } - } -} import TypeApplications._ /** A decorator that provides methods for modeling type application */ -class TypeApplications(val self: Type) extends AnyVal { +class TypeApplications(val self: Type) extends AnyVal: /** The type parameters of this type are: * For a ClassInfo type, the type 
parameters of its class. @@ -166,9 +159,9 @@ class TypeApplications(val self: Type) extends AnyVal { * For a refinement type, the type parameters of its parent, dropping * any type parameter that is-rebound by the refinement. */ - final def typeParams(using Context): List[TypeParamInfo] = { + final def typeParams(using Context): List[TypeParamInfo] = record("typeParams") - def isTrivial(prefix: Type, tycon: Symbol) = prefix match { + def isTrivial(prefix: Type, tycon: Symbol) = prefix match case prefix: ThisType => prefix.cls eq tycon.owner case prefix: TermRef => @@ -176,15 +169,13 @@ class TypeApplications(val self: Type) extends AnyVal { sym.is(Module) && sym.isStatic && (sym.moduleClass eq tycon.owner) case NoPrefix => true case _ => false - } - try self match { + try self match case self: TypeRef => val tsym = self.symbol if (tsym.isClass) tsym.typeParams - else tsym.infoOrCompleter match { + else tsym.infoOrCompleter match case info: LazyType if isTrivial(self.prefix, tsym) => info.completerTypeParams(tsym) case _ => self.info.typeParams - } case self: AppliedType => if (self.tycon.typeSymbol.isClass) Nil else self.superType.typeParams @@ -200,11 +191,8 @@ class TypeApplications(val self: Type) extends AnyVal { self.superType.typeParams case _ => Nil - } - catch { + catch case ex: Throwable => handleRecursive("type parameters of", self.show, ex) - } - } /** Substitute in `self` the type parameters of `tycon` by some other types. 
*/ final def substTypeParams(tycon: Type, to: List[Type])(using Context): Type = @@ -217,7 +205,7 @@ class TypeApplications(val self: Type) extends AnyVal { if (isLambdaSub) typeParams else Nil /** If `self` is a generic class, its type parameter symbols, otherwise Nil */ - final def typeParamSymbols(using Context): List[TypeSymbol] = typeParams match { + final def typeParamSymbols(using Context): List[TypeSymbol] = typeParams match case tparams @ (_: Symbol) :: _ => assert(tparams.forall(_.isInstanceOf[Symbol])) tparams.asInstanceOf[List[TypeSymbol]] @@ -226,17 +214,15 @@ class TypeApplications(val self: Type) extends AnyVal { // whereas the second call gives some LambdaParams. This was observed // for ticket0137.scala case _ => Nil - } /** Is self type bounded by a type lambda or AnyKind? */ def isLambdaSub(using Context): Boolean = hkResult.exists /** Is self type of kind "*"? */ def hasSimpleKind(using Context): Boolean = - typeParams.isEmpty && !self.hasAnyKind || { + typeParams.isEmpty && !self.hasAnyKind `||`: val alias = self.dealias (alias ne self) && alias.hasSimpleKind - } /** The top type with the same kind as `self`. This is largest type capturing * the parameter shape of a type without looking at precise bounds. @@ -255,7 +241,7 @@ class TypeApplications(val self: Type) extends AnyVal { /** If self type is higher-kinded, its result type, otherwise NoType. * Note: The hkResult of an any-kinded type is again AnyKind. */ - def hkResult(using Context): Type = self.dealias match { + def hkResult(using Context): Type = self.dealias match case self: TypeRef => if (self.symbol == defn.AnyKindClass) self else self.info.hkResult case self: AppliedType => @@ -269,16 +255,14 @@ class TypeApplications(val self: Type) extends AnyVal { self.origin.hkResult case self: TypeProxy => self.superType.hkResult case _ => NoType - } /** Do self and other have the same kinds (not counting bounds and variances)? * Note: An any-kinded type "has the same kind" as any other type. 
*/ - def hasSameKindAs(other: Type)(using Context): Boolean = { - def isAnyKind(tp: Type) = tp match { + def hasSameKindAs(other: Type)(using Context): Boolean = + def isAnyKind(tp: Type) = tp match case tp: TypeRef => tp.symbol == defn.AnyKindClass case _ => false - } val selfResult = self.hkResult val otherResult = other.hkResult isAnyKind(selfResult) || isAnyKind(otherResult) || @@ -287,18 +271,16 @@ class TypeApplications(val self: Type) extends AnyVal { selfResult.hasSameKindAs(otherResult) && self.typeParams.corresponds(other.typeParams)((sparam, oparam) => sparam.paramInfo.hasSameKindAs(oparam.paramInfo)) - else !otherResult.exists + else !otherResult.exists } - } /** Dealias type if it can be done without forcing the TypeRef's info */ - def safeDealias(using Context): Type = self match { + def safeDealias(using Context): Type = self match case self: TypeRef if self.denot.exists && self.symbol.isAliasType && !self.symbol.isProvisional => self.superType.stripTypeVar.safeDealias case _ => self - } /** Convert a type constructor `TC` which has type parameters `X1, ..., Xn` * to `[X1, ..., Xn] -> TC[X1, ..., Xn]`. 
@@ -312,15 +294,13 @@ class TypeApplications(val self: Type) extends AnyVal { if (isLambdaSub) self else EtaExpansion(self) /** Eta expand if `self` is a (non-lambda) class reference and `bound` is a higher-kinded type */ - def EtaExpandIfHK(bound: Type)(using Context): Type = { + def EtaExpandIfHK(bound: Type)(using Context): Type = val hkParams = bound.hkTypeParams if (hkParams.isEmpty) self - else self match { + else self match case self: TypeRef if self.symbol.isClass && self.typeParams.length == hkParams.length => EtaExpansion(self) case _ => self - } - } /** The type representing * @@ -330,18 +310,18 @@ class TypeApplications(val self: Type) extends AnyVal { * @param self = `T` * @param args = `U1,...,Un` */ - final def appliedTo(args: List[Type])(using Context): Type = { + final def appliedTo(args: List[Type])(using Context): Type = record("appliedTo") val typParams = self.typeParams val stripped = self.stripTypeVar val dealiased = stripped.safeDealias if (args.isEmpty || ctx.erasedTypes) self - else dealiased match { + else dealiased match case dealiased: HKTypeLambda => def tryReduce = - if (!args.exists(isBounds)) { - val followAlias = Config.simplifyApplications && { - dealiased.resType match { + if (!args.exists(isBounds)) + val followAlias = Config.simplifyApplications `&&`: + dealiased.resType match case AppliedType(tyconBody, dealiasedArgs) => // Reduction should not affect type inference when it's // just eta-reduction (ignoring variance annotations). 
@@ -350,8 +330,6 @@ class TypeApplications(val self: Type) extends AnyVal { dealiased.paramRefs == dealiasedArgs || defn.isCompiletimeAppliedType(tyconBody.typeSymbol) case _ => false - } - } if ((dealiased eq stripped) || followAlias) try val instantiated = dealiased.instantiate(args.mapConserve(_.boxedUnlessFun(self))) @@ -363,8 +341,7 @@ class TypeApplications(val self: Type) extends AnyVal { handleRecursive("try to instantiate", i"$dealiased[$args%, %]", ex) else AppliedType(self, args) - } - else dealiased.resType match { + else dealiased.resType match case AppliedType(tycon, args1) if tycon.safeDealias ne tycon => // In this case we should always dealias since we cannot handle // higher-kinded applications to wildcard arguments. @@ -376,7 +353,6 @@ class TypeApplications(val self: Type) extends AnyVal { val reduced = reducer(dealiased.resType) if (reducer.allReplaced) reduced else AppliedType(dealiased, args) - } tryReduce case dealiased: PolyType => dealiased.instantiate(args) @@ -396,8 +372,6 @@ class TypeApplications(val self: Type) extends AnyVal { dealiased case dealiased => AppliedType(self, args) - } - } final def appliedTo(arg: Type)(using Context): Type = appliedTo(arg :: Nil) final def appliedTo(arg1: Type, arg2: Type)(using Context): Type = appliedTo(arg1 :: arg2 :: Nil) @@ -410,31 +384,29 @@ class TypeApplications(val self: Type) extends AnyVal { * up hk type parameters matching the arguments. This is needed when unpickling * Scala2 files such as `scala.collection.generic.Mapfactory`. */ - final def safeAppliedTo(args: List[Type])(using Context): Type = self match { + final def safeAppliedTo(args: List[Type])(using Context): Type = self match case self: TypeRef if !self.symbol.isClass && self.symbol.isCompleting => AppliedType(self, args) case _ => appliedTo(args) - } /** Turns non-bounds types to type bounds. * A (possible lambda abstracted) match type is turned into a match alias. 
* Every other type is turned into a type alias */ - final def toBounds(using Context): TypeBounds = self match { + final def toBounds(using Context): TypeBounds = self match case self: TypeBounds => self // this can happen for wildcard args case _ => if (self.isMatch) MatchAlias(self) else TypeAlias(self) - } /** Translate a type of the form From[T] to either To[T] or To[? <: T] (if `wildcardArg` is set). Keep other types as they are. * `from` and `to` must be static classes, both with one type parameter, and the same variance. * Do the same for by name types => From[T] and => To[T] */ - def translateParameterized(from: ClassSymbol, to: ClassSymbol, wildcardArg: Boolean = false)(using Context): Type = self match { + def translateParameterized(from: ClassSymbol, to: ClassSymbol, wildcardArg: Boolean = false)(using Context): Type = self match case self @ ExprType(tp) => self.derivedExprType(tp.translateParameterized(from, to)) case _ => - if (self.derivesFrom(from)) { + if (self.derivesFrom(from)) def elemType(tp: Type): Type = tp.widenDealias match case tp: OrType => if tp.tp1.isBottomType then elemType(tp.tp2) @@ -445,9 +417,7 @@ class TypeApplications(val self: Type) extends AnyVal { val arg = elemType(self) val arg1 = if (wildcardArg) TypeBounds.upper(arg) else arg to.typeRef.appliedTo(arg1) - } else self - } /** If this is a repeated parameter `*T`, translate it to either `Seq[T]` or * `Array[? <: T]` depending on the value of `toArray`. @@ -525,20 +495,18 @@ class TypeApplications(val self: Type) extends AnyVal { /** If this is the image of a type argument; recover the type argument, * otherwise NoType. 
*/ - final def argInfo(using Context): Type = self match { + final def argInfo(using Context): Type = self match case self: TypeAlias => self.alias case self: TypeBounds => self case _ => NoType - } /** If this is a type alias, its underlying type, otherwise the type itself */ - def dropAlias(using Context): Type = self match { + def dropAlias(using Context): Type = self match case TypeAlias(alias) => alias case _ => self - } /** The element type of a sequence or array */ - def elemType(using Context): Type = self.widenDealias match { + def elemType(using Context): Type = self.widenDealias match case defn.ArrayOf(elemtp) => elemtp case JavaArrayType(elemtp) => elemtp case tp: OrType if tp.tp1.isBottomType => tp.tp2.elemType @@ -547,5 +515,3 @@ class TypeApplications(val self: Type) extends AnyVal { self.baseType(defn.SeqClass) .orElse(self.baseType(defn.ArrayClass)) .argInfos.headOption.getOrElse(NoType) - } -} diff --git a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala index b84af998ffb6..d6a2b1612d11 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeComparer.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeComparer.scala @@ -27,7 +27,7 @@ import cc.{CapturingType, derivedCapturingType, CaptureSet, stripCapturing, isBo /** Provides methods to compare types. */ -class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling, PatternTypeConstrainer { +class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling, PatternTypeConstrainer: import TypeComparer._ Stats.record("TypeComparer") @@ -68,14 +68,12 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling * constraint might still be retracted and the instantiation should * then be reversed. 
*/ - def subtypeCheckInProgress: Boolean = { + def subtypeCheckInProgress: Boolean = val result = recCount > 0 - if (result) { + if (result) constr.println("*** needsGC ***") needsGc = true - } result - } /** For statistics: count how many isSubTypes are part of successful comparisons */ private var successCount = 0 @@ -107,11 +105,10 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling */ private def GADTusage(sym: Symbol): true = recordGadtUsageIf(!sym.owner.isConstructor) - private def recordGadtUsageIf(cond: Boolean): true = { + private def recordGadtUsageIf(cond: Boolean): true = if cond then GADTused = true true - } private def isBottom(tp: Type) = tp.widen.isRef(NothingClass) @@ -122,16 +119,14 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling // Subtype testing `<:<` - def topLevelSubType(tp1: Type, tp2: Type): Boolean = { + def topLevelSubType(tp1: Type, tp2: Type): Boolean = if (tp2 eq NoType) return false if ((tp2 eq tp1) || (tp2 eq WildcardType)) return true try isSubType(tp1, tp2) - finally { + finally monitored = false if (Config.checkConstraintsSatisfiable) assert(isSatisfiable, constraint.show) - } - } def necessarySubType(tp1: Type, tp2: Type): Boolean = val saved = myNecessaryConstraintsOnly @@ -177,11 +172,10 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling */ val startSameTypeTrackingLevel = 3 - private inline def inFrozenGadtIf[T](cond: Boolean)(inline op: T): T = { + private inline def inFrozenGadtIf[T](cond: Boolean)(inline op: T): T = val savedFrozenGadt = frozenGadt frozenGadt ||= cond try op finally frozenGadt = savedFrozenGadt - } private inline def inFrozenGadtAndConstraint[T](inline op: T): T = inFrozenGadtIf(true)(inFrozenConstraint(op)) @@ -197,23 +191,19 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling comparedTypeLambdas += tl2 try op finally comparedTypeLambdas = saved - protected def isSubType(tp1: 
Type, tp2: Type, a: ApproxState): Boolean = { + protected def isSubType(tp1: Type, tp2: Type, a: ApproxState): Boolean = val savedApprox = approx val savedLeftRoot = leftRoot - if (a == ApproxState.Fresh) { + if (a == ApproxState.Fresh) this.approx = ApproxState.None this.leftRoot = tp1 - } else this.approx = a try recur(tp1, tp2) - catch { + catch case ex: Throwable => handleRecursive("subtype", i"$tp1 <:< $tp2", ex, weight = 2) - } - finally { + finally this.approx = savedApprox this.leftRoot = savedLeftRoot - } - } def isSubType(tp1: Type, tp2: Type): Boolean = isSubType(tp1, tp2, ApproxState.Fresh) @@ -234,10 +224,10 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling * code would have two extra parameters for each of the many calls that go from * one sub-part of isSubType to another. */ - protected def recur(tp1: Type, tp2: Type): Boolean = trace(s"isSubType ${traceInfo(tp1, tp2)}${approx.show}", subtyping) { + protected def recur(tp1: Type, tp2: Type): Boolean = trace(s"isSubType ${traceInfo(tp1, tp2)}${approx.show}", subtyping): - def monitoredIsSubType = { - if (pendingSubTypes == null) { + def monitoredIsSubType = + if (pendingSubTypes == null) pendingSubTypes = util.HashSet[(Type, Type)]() report.log(s"!!! deep subtype recursion involving ${tp1.show} <:< ${tp2.show}, constraint = ${state.constraint.show}") report.log(s"!!! 
constraint = ${constraint.show}") @@ -247,12 +237,11 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling assert(!ctx.settings.YnoDeepSubtypes.value) if (Config.traceDeepSubTypeRecursions && !this.isInstanceOf[ExplainingTypeComparer]) report.log(explained(_.isSubType(tp1, tp2, approx))) - } // Eliminate LazyRefs before checking whether we have seen a type before - val normalize = new TypeMap { + val normalize = new TypeMap: val DerefLimit = 10 var derefCount = 0 - def apply(t: Type) = t match { + def apply(t: Type) = t match case t: LazyRef => // Dereference a lazyref to detect underlying matching types, but // be careful not to get into an infinite recursion. If recursion count @@ -264,20 +253,15 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling tp case _ => mapOver(t) - } - } val p = (normalize(tp1), normalize(tp2)) - !pendingSubTypes.nn.contains(p) && { - try { + !pendingSubTypes.nn.contains(p) `&&`: + try pendingSubTypes.nn += p firstTry - } finally pendingSubTypes.nn -= p - } - } - def firstTry: Boolean = tp2 match { + def firstTry: Boolean = tp2 match case tp2: NamedType => def compareNamed(tp1: Type, tp2: NamedType): Boolean = val ctx = comparerContext @@ -300,13 +284,12 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling case _ => tp1 match case tp1: NamedType => - tp1.info match { + tp1.info match case info1: TypeAlias => if recur(info1.alias, tp2) then return true if tp1.asInstanceOf[TypeRef].canDropAlias && hasStablePrefix(tp2) then return false case _ => - } val sym2 = tp2.symbol var sym1 = tp1.symbol if (sym1.is(ModuleClass) && sym2.is(ModuleVal)) @@ -342,10 +325,9 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling case tp2: TypeVar => recur(tp1, typeVarInstance(tp2)) case tp2: WildcardType => - def compareWild = tp2.optBounds match { + def compareWild = tp2.optBounds match case TypeBounds(_, hi) => recur(tp1, hi) case 
NoType => true - } compareWild case tp2: LazyRef => isBottom(tp1) || !tp2.evaluating && recur(tp1, tp2.ref) @@ -354,9 +336,9 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling case tp2: AnnotatedType if !tp2.isRefining => recur(tp1, tp2.parent) case tp2: ThisType => - def compareThis = { + def compareThis = val cls2 = tp2.cls - tp1 match { + tp1 match case tp1: ThisType => tp1.cls eq cls2 case tp1: NamedType if cls2.is(Module) && cls2.eq(tp1.typeSymbol) => @@ -365,17 +347,14 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling secondTry case _ => secondTry - } - } compareThis case tp2: SuperType => - def compareSuper = tp1 match { + def compareSuper = tp1 match case tp1: SuperType => recur(tp1.thistpe, tp2.thistpe) && isSameType(tp1.supertpe, tp2.supertpe) case _ => secondTry - } compareSuper case AndType(tp21, tp22) => recur(tp1, tp21) && recur(tp1, tp22) @@ -383,52 +362,45 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling if (tp21.stripTypeVar eq tp22.stripTypeVar) recur(tp1, tp21) else secondTry case TypeErasure.ErasedValueType(tycon1, underlying2) => - def compareErasedValueType = tp1 match { + def compareErasedValueType = tp1 match case TypeErasure.ErasedValueType(tycon2, underlying1) => (tycon1.symbol eq tycon2.symbol) && isSubType(underlying1, underlying2) case _ => secondTry - } compareErasedValueType case ConstantType(v2) => - tp1 match { + tp1 match case ConstantType(v1) => v1.value == v2.value && recur(v1.tpe, v2.tpe) case _ => secondTry - } case tp2: AnyConstantType => if (tp2.tpe.exists) recur(tp1, tp2.tpe) - else tp1 match { + else tp1 match case tp1: ConstantType => tp2.tpe = tp1 true case _ => secondTry - } case _: FlexType => true case _ => secondTry - } - def secondTry: Boolean = tp1 match { + def secondTry: Boolean = tp1 match case tp1: NamedType => - tp1.info match { + tp1.info match case info1: TypeAlias => if (recur(info1.alias, tp2)) return true 
if (tp1.prefix.isStable) return tryLiftedToThis1 case _ => if (tp1 eq NothingType) || isBottom(tp1) then return true - } thirdTry case tp1: TypeParamRef => - def flagNothingBound = { - if (!frozenConstraint && isBottom(tp2) && state.isGlobalCommittable) { + def flagNothingBound = + if (!frozenConstraint && isBottom(tp2) && state.isGlobalCommittable) def msg = s"!!! instantiated to Nothing: $tp1, constraint = ${constraint.show}" if (Config.failOnInstantiationToNothing) assert(false, msg) else report.log(msg) - } true - } def compareTypeParamRef = assumedTrue(tp1) || tp2.dealias.match @@ -438,31 +410,28 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling || isSubTypeWhenFrozen(bounds(tp1).hi.boxed, tp2) || (if canConstrain(tp1) && !approx.high then addConstraint(tp1, tp2, fromBelow = false) && flagNothingBound - else thirdTry) + else thirdTry) compareTypeParamRef case tp1: ThisType => val cls1 = tp1.cls - tp2 match { + tp2 match case tp2: TermRef if cls1.is(Module) && cls1.eq(tp2.typeSymbol) => cls1.isStaticOwner || recur(cls1.owner.thisType, tp2.prefix) || thirdTry case _ => thirdTry - } case tp1: SkolemType => - tp2 match { + tp2 match case tp2: SkolemType if !ctx.phase.isTyper && recur(tp1.info, tp2.info) => true case _ => thirdTry - } case tp1: TypeVar => recur(typeVarInstance(tp1), tp2) case tp1: WildcardType => - def compareWild = tp1.optBounds match { + def compareWild = tp1.optBounds match case bounds: TypeBounds => recur(bounds.lo, tp2) case _ => true - } compareWild case tp1: LazyRef => // If `tp1` is in train of being evaluated, don't force it @@ -483,7 +452,7 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling && (tp1.widenSingletons ne tp1) && inFrozenGadtAndConstraint(recur(tp1.widenSingletons, tp2)) - def joinOK = tp2.dealiasKeepRefiningAnnots match { + def joinOK = tp2.dealiasKeepRefiningAnnots match case tp2: AppliedType if !tp2.tycon.typeSymbol.isClass => // If we apply the default 
algorithm for `A[X] | B[Y] <: C[Z]` where `C` is a // type parameter, we will instantiate `C` to `A` and then fail when comparing @@ -492,7 +461,6 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling inFrozenGadtAndConstraint(recur(tp1.join, tp2)) case _ => false - } /** Mark toplevel type vars in `tp2` as hard in the current constraint */ def hardenTypeVars(tp2: Type): Unit = tp2.dealiasKeepRefiningAnnots match @@ -548,9 +516,8 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling true case _ => thirdTry - } - def thirdTryNamed(tp2: NamedType): Boolean = tp2.info match { + def thirdTryNamed(tp2: NamedType): Boolean = tp2.info match case info2: TypeBounds => def compareGADT: Boolean = tp2.symbol.onGadtBounds(gbounds2 => @@ -575,7 +542,7 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling case _ => val cls2 = tp2.symbol if (cls2.isClass) - if (cls2.typeParams.isEmpty) { + if (cls2.typeParams.isEmpty) if (cls2 eq AnyKindClass) return true if (isBottom(tp1)) return true if (tp1.isLambdaSub) return false @@ -585,16 +552,13 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling if cls2 eq AnyClass then return true if cls2 == defn.SingletonClass && tp1.isStable then return true return tryBaseType(cls2) - } - else if (cls2.is(JavaDefined)) { + else if (cls2.is(JavaDefined)) // If `cls2` is parameterized, we are seeing a raw type, so we need to compare only the symbol val base = nonExprBaseType(tp1, cls2) if (base.typeSymbol == cls2) return true - } else if tp1.typeParams.nonEmpty && !tp1.isAnyKind then return recur(tp1, EtaExpansion(tp2)) fourthTry - } def compareTypeParamRef(tp2: TypeParamRef): Boolean = assumedTrue(tp2) @@ -615,10 +579,10 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling case _ => false || (if canConstrain(tp2) && !approx.low then addConstraint(tp2, tp1.widenExpr, fromBelow = true) - else fourthTry) 
+ else fourthTry) } - def thirdTry: Boolean = tp2 match { + def thirdTry: Boolean = tp2 match case tp2 @ AppliedType(tycon2, args2) => compareAppliedType2(tp2, tycon2, args2) case tp2: NamedType => @@ -694,7 +658,7 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling compareRefined case tp2: RecType => - def compareRec = tp1.safeDealias match { + def compareRec = tp1.safeDealias match case tp1: RecType => val rthis1 = tp1.recThis recur(tp1.parent, tp2.parent.substRecThis(tp2, rthis1)) @@ -702,10 +666,9 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling case _ => val tp1stable = ensureStableSingleton(tp1) recur(fixRecs(tp1stable, tp1stable.widenExpr), tp2.parent.substRecThis(tp2, tp1stable)) - } compareRec case tp2: HKTypeLambda => - def compareTypeLambda: Boolean = tp1.stripTypeVar match { + def compareTypeLambda: Boolean = tp1.stripTypeVar match case tp1: HKTypeLambda => /* Don't compare bounds of lambdas under language:Scala2, or t2994 will fail. 
* The issue is that, logically, bounds should compare contravariantly, @@ -724,21 +687,18 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling migrateTo3 || tp1.typeParams.corresponds(tp2.typeParams)((tparam1, tparam2) => isSubType(tparam2.paramInfo.subst(tp2, tp1), tparam1.paramInfo)) - comparingTypeLambdas(tp1, tp2) { + comparingTypeLambdas(tp1, tp2): val variancesOK = variancesConform(tp1.typeParams, tp2.typeParams) variancesOK && boundsOK && isSubType(tp1.resType, tp2.resType.subst(tp2, tp1)) - } case _ => val tparams1 = tp1.typeParams if (tparams1.nonEmpty) return recur(tp1.EtaExpand(tparams1), tp2) || fourthTry - tp2 match { + tp2 match case EtaExpansion(tycon2: TypeRef) if tycon2.symbol.isClass && tycon2.symbol.is(JavaDefined) => recur(tp1, tycon2) || fourthTry case _ => fourthTry - } - } compareTypeLambda case tp2 @ OrType(tp21, tp22) => compareAtoms(tp1, tp2) match @@ -770,16 +730,14 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling // `|' types to the right of <: are problematic, because // we have to choose one constraint set or another, which might cut off // solutions. The rewriting delays the point where we have to choose. 
- tp21 match { + tp21 match case AndType(tp211, tp212) => return recur(tp1, OrType(tp211, tp22, tp2.isSoft)) && recur(tp1, OrType(tp212, tp22, tp2.isSoft)) case _ => - } - tp22 match { + tp22 match case AndType(tp221, tp222) => return recur(tp1, OrType(tp21, tp221, tp2.isSoft)) && recur(tp1, OrType(tp21, tp222, tp2.isSoft)) case _ => - } either(recur(tp1, tp21), recur(tp1, tp22)) || fourthTry case tp2: MatchType => val reduced = tp2.reduced @@ -788,28 +746,25 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling else fourthTry case tp2: MethodType => - def compareMethod = tp1 match { + def compareMethod = tp1 match case tp1: MethodType => (tp1.signature consistentParams tp2.signature) && matchingMethodParams(tp1, tp2) && (!tp2.isImplicitMethod || tp1.isImplicitMethod) && isSubType(tp1.resultType, tp2.resultType.subst(tp2, tp1)) case _ => false - } compareMethod case tp2: PolyType => - def comparePoly = tp1 match { + def comparePoly = tp1 match case tp1: PolyType => - comparingTypeLambdas(tp1, tp2) { + comparingTypeLambdas(tp1, tp2): (tp1.signature consistentParams tp2.signature) && matchingPolyParams(tp1, tp2) && isSubType(tp1.resultType, tp2.resultType.subst(tp2, tp1)) - } case _ => false - } comparePoly case tp2 @ ExprType(restpe2) => - def compareExpr = tp1 match { + def compareExpr = tp1 match // We allow ()T to be a subtype of => T. // We need some subtype relationship between them so that e.g. 
// def toString and def toString() don't clash when seen @@ -819,20 +774,18 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling case tp1 @ MethodType(Nil) => isSubType(tp1.resultType, restpe2) case tp1 @ ExprType(restpe1) => isSubType(restpe1, restpe2) case _ => fourthTry - } compareExpr case tp2 @ TypeBounds(lo2, hi2) => - def compareTypeBounds = tp1 match { + def compareTypeBounds = tp1 match case tp1 @ TypeBounds(lo1, hi1) => ((lo2 eq NothingType) || isSubType(lo2, lo1)) && ((hi2 eq AnyType) && !hi1.isLambdaSub - || (hi2 eq AnyKindType) - || isSubType(hi1, hi2)) + || (hi2 eq AnyKindType) + || isSubType(hi1, hi2)) case tp1: ClassInfo => tp2 contains tp1 case _ => false - } compareTypeBounds case CapturingType(parent2, refs2) => def compareCapturing = @@ -854,18 +807,16 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling (tp1.derivesAnnotWith(tp2.annot.sameAnnotation) || tp1.isBottomType) && recur(tp1, tp2.parent) case ClassInfo(pre2, cls2, _, _, _) => - def compareClassInfo = tp1 match { + def compareClassInfo = tp1 match case ClassInfo(pre1, cls1, _, _, _) => (cls1 eq cls2) && isSubType(pre1, pre2) case _ => false - } compareClassInfo case _ => fourthTry - } - def tryBaseType(cls2: Symbol) = { + def tryBaseType(cls2: Symbol) = val base = nonExprBaseType(tp1, cls2).boxedIfTypeParam(tp1.typeSymbol) if base.exists && (base ne tp1) && (!caseLambda.exists || canWidenAbstract || tp1.widen.underlyingClassRef(refinementOK = true).exists) @@ -877,11 +828,10 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling // expands to a match type. In this case, we should try to reduce the type // and compare the redux. 
This is done in fourthTry else fourthTry - } - def fourthTry: Boolean = tp1 match { + def fourthTry: Boolean = tp1 match case tp1: TypeRef => - tp1.info match { + tp1.info match case info1 @ TypeBounds(lo1, hi1) => def compareGADT = tp1.symbol.onGadtBounds(gbounds1 => @@ -915,7 +865,6 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling val sym1 = tp1.symbol (sym1 eq NothingClass) && tp2.isValueTypeOrLambda || (sym1 eq NullClass) && isNullable(tp2) - } case tp1 @ AppliedType(tycon1, args1) => compareAppliedType1(tp1, tycon1, args1) case tp1: SingletonType => @@ -943,16 +892,14 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling case tp1: RecType => isNewSubType(tp1.parent) case tp1: HKTypeLambda => - def compareHKLambda = tp1 match { + def compareHKLambda = tp1 match case EtaExpansion(tycon1: TypeRef) if tycon1.symbol.isClass && tycon1.symbol.is(JavaDefined) => // It's a raw type that was mistakenly eta-expanded to a hk-type. // This can happen because we do not cook types coming from Java sources recur(tycon1, tp2) - case _ => tp2 match { - case tp2: HKTypeLambda => false // this case was covered in thirdTry - case _ => tp2.typeParams.hasSameLengthAs(tp1.paramRefs) && isSubType(tp1.resultType, tp2.appliedTo(tp1.paramRefs)) - } - } + case _ => tp2 match + case tp2: HKTypeLambda => false // this case was covered in thirdTry + case _ => tp2.typeParams.hasSameLengthAs(tp1.paramRefs) && isSubType(tp1.resultType, tp2.appliedTo(tp1.paramRefs)) compareHKLambda case AndType(tp11, tp12) => val tp2a = tp2.dealiasKeepRefiningAnnots @@ -964,41 +911,36 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling // `&' types to the left of <: are problematic, because // we have to choose one constraint set or another, which might cut off // solutions. The rewriting delays the point where we have to choose. 
- tp11 match { + tp11 match case OrType(tp111, tp112) => return recur(AndType(tp111, tp12), tp2) && recur(AndType(tp112, tp12), tp2) case _ => - } - tp12 match { + tp12 match case OrType(tp121, tp122) => return recur(AndType(tp11, tp121), tp2) && recur(AndType(tp11, tp122), tp2) case _ => - } val tp1norm = simplifyAndTypeWithFallback(tp11, tp12, tp1) if (tp1 ne tp1norm) recur(tp1norm, tp2) else either(recur(tp11, tp2), recur(tp12, tp2)) case tp1: MatchType => - def compareMatch = tp2 match { + def compareMatch = tp2 match case tp2: MatchType => isSameType(tp1.scrutinee, tp2.scrutinee) && tp1.cases.corresponds(tp2.cases)(isSubType) case _ => false - } recur(tp1.underlying, tp2) || compareMatch case tp1: AnnotatedType if tp1.isRefining => isNewSubType(tp1.parent) case JavaArrayType(elem1) => - def compareJavaArray = tp2 match { + def compareJavaArray = tp2 match case JavaArrayType(elem2) => isSubType(elem1, elem2) case _ => tp2.isAnyRef - } compareJavaArray case tp1: ExprType if ctx.phaseId > gettersPhase.id => // getters might have converted T to => T, need to compensate. recur(tp1.widenExpr, tp2) case _ => false - } /** When called from `pre1.A <:< pre2.A` does `pre1` relate to `pre2` so that * the subtype test is true? 
This is the case if @@ -1031,7 +973,7 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling def isSubPrefix(pre1: Type, pre2: Type): Boolean = def samePkg(sym1: Symbol, sym2: Symbol) = sym2.is(Package) && sym1.isPackageObject && sym1.owner == sym2.moduleClass - || sym1.is(Package) && sym2.isPackageObject && sym2.owner == sym1.moduleClass + || sym1.is(Package) && sym2.isPackageObject && sym2.owner == sym1.moduleClass pre1 match case pre1: ThisType => pre2 match @@ -1120,10 +1062,10 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling val otherArgs = other.args val d = otherArgs.length - args.length - d >= 0 && { + d >= 0 `&&`: val tparams = tycon.typeParams val remainingTparams = otherTycon.typeParams.drop(d) - variancesConform(remainingTparams, tparams) && { + variancesConform(remainingTparams, tparams) `&&`: val adaptedTycon = if d > 0 then val initialArgs = otherArgs.take(d) @@ -1142,20 +1084,18 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling otherTycon (assumedTrue(tycon) || directionalIsSubType(tycon, adaptedTycon)) && directionalRecur(adaptedTycon.appliedTo(args), other) - } - } end compareAppliedTypeParamRef /** Subtype test for the hk application `tp2 = tycon2[args2]`. */ - def compareAppliedType2(tp2: AppliedType, tycon2: Type, args2: List[Type]): Boolean = { + def compareAppliedType2(tp2: AppliedType, tycon2: Type, args2: List[Type]): Boolean = val tparams = tycon2.typeParams if (tparams.isEmpty) return false // can happen for ill-typed programs, e.g. neg/tcpoly_overloaded.scala /** True if `tp1` and `tp2` have compatible type constructors and their * corresponding arguments are subtypes relative to their variance (see `isSubArgs`). 
*/ - def isMatchingApply(tp1: Type): Boolean = tp1.widen match { + def isMatchingApply(tp1: Type): Boolean = tp1.widen match case tp1 @ AppliedType(tycon1, args1) => // We intentionally do not automatically dealias `tycon1` or `tycon2` here. // `TypeApplications#appliedTo` already takes care of dealiasing type @@ -1202,13 +1142,13 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling // is weaker than the first, we keep it in place of the first. // Note that if the isSubArgs test fails, we will proceed anyway by // dealising by doing a compareLower. - def loop(tycon1: Type, args1: List[Type]): Boolean = tycon1 match { + def loop(tycon1: Type, args1: List[Type]): Boolean = tycon1 match case tycon1: TypeParamRef => (tycon1 == tycon2 || - canConstrain(tycon1) && isSubType(tycon1, tycon2)) && + canConstrain(tycon1) && isSubType(tycon1, tycon2)) && isSubArgs(args1, args2, tp1, tparams) case tycon1: TypeRef => - tycon2 match { + tycon2 match case tycon2: TypeRef => val tycon1sym = tycon1.symbol val tycon2sym = tycon2.symbol @@ -1232,7 +1172,7 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling || tycon1sym.byGadtBounds(b => isSubTypeWhenFrozen(b.hi, tycon2)) || tycon2sym.byGadtBounds(b => isSubTypeWhenFrozen(tycon1, b.lo)) || byGadtOrdering - ) && { + ) `&&`: // There are two cases in which we can assume injectivity. // First we check if either sym is a class. 
// Then: @@ -1245,55 +1185,46 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling (tycon1sym.isClass || tycon2sym.isClass) && (!touchedGADTs || gadtIsInstantiated) - inFrozenGadtIf(!tyconIsInjective) { + inFrozenGadtIf(!tyconIsInjective): if tycon1sym == tycon2sym && tycon1sym.isAliasType then val preConstraint = constraint isSubArgs(args1, args2, tp1, tparams) && tryAlso(preConstraint, recur(tp1.superTypeNormalized, tp2.superTypeNormalized)) else isSubArgs(args1, args2, tp1, tparams) - } - } res && recordGadtUsageIf(touchedGADTs) case _ => false - } case tycon1: TypeVar => loop(tycon1.underlying, args1) case tycon1: AnnotatedType if !tycon1.isRefining => loop(tycon1.underlying, args1) case _ => false - } loop(tycon1, args1) case _ => false - } /** `param2` can be instantiated to a type application prefix of the LHS * or to a type application prefix of one of the LHS base class instances * and the resulting type application is a supertype of `tp1`. */ - def canInstantiate(tycon2: TypeParamRef): Boolean = { - def appOK(tp1base: Type) = tp1base match { + def canInstantiate(tycon2: TypeParamRef): Boolean = + def appOK(tp1base: Type) = tp1base match case tp1base: AppliedType => compareAppliedTypeParamRef(tycon2, args2, tp1base, fromBelow = true) case _ => false - } val tp1w = tp1.widen - appOK(tp1w) || tp1w.typeSymbol.isClass && { + appOK(tp1w) || tp1w.typeSymbol.isClass `&&`: val classBounds = tycon2.classSymbols - def liftToBase(bcs: List[ClassSymbol]): Boolean = bcs match { + def liftToBase(bcs: List[ClassSymbol]): Boolean = bcs match case bc :: bcs1 => classBounds.exists(bc.derivesFrom) && appOK(nonExprBaseType(tp1, bc)) || liftToBase(bcs1) case _ => false - } liftToBase(tp1w.baseClasses) - } - } /** Fall back to comparing either with `fourthTry` or against the lower * approximation of the rhs. 
@@ -1329,7 +1260,7 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling case _ => false } && recordGadtUsageIf(true) - tycon2 match { + tycon2 match case param2: TypeParamRef => isMatchingApply(tp1) || canConstrain(param2) && canInstantiate(param2) || @@ -1338,7 +1269,7 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling isMatchingApply(tp1) || byGadtBounds || defn.isCompiletimeAppliedType(tycon2.symbol) && compareCompiletimeAppliedType(tp2, tp1, fromBelow = true) || { - tycon2.info match { + tycon2.info match case info2: TypeBounds => compareLower(info2, tyconIsTypeRef = true) case info2: ClassInfo => @@ -1347,7 +1278,6 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling tryBaseType(info2.cls) case _ => fourthTry - } } || tryLiftedToThis2 case tv: TypeVar => @@ -1361,20 +1291,17 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling fallback(tycon2.lowerBound) case _ => false - } - } /** Subtype test for the application `tp1 = tycon1[args1]`. */ def compareAppliedType1(tp1: AppliedType, tycon1: Type, args1: List[Type]): Boolean = - tycon1 match { + tycon1 match case param1: TypeParamRef => - def canInstantiate = tp2 match { + def canInstantiate = tp2 match case tp2base: AppliedType => compareAppliedTypeParamRef(param1, args1, tp2base, fromBelow = false) case _ => false - } canConstrain(param1) && canInstantiate || isSubType(bounds(param1).hi.applyIfParameterized(args1), tp2, approx.addLow) case tycon1: TypeRef => @@ -1395,41 +1322,35 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling recur(tp1.superTypeNormalized, tp2) case _ => false - } /** Compare `tp` of form `S[arg]` with `other`, via ">:>" if fromBelow is true, "<:<" otherwise. * If `arg` is a Nat constant `n`, proceed with comparing `n + 1` and `other`. * Otherwise, if `other` is a Nat constant `n`, proceed with comparing `arg` and `n - 1`. 
*/ - def compareS(tp: AppliedType, other: Type, fromBelow: Boolean): Boolean = tp.args match { + def compareS(tp: AppliedType, other: Type, fromBelow: Boolean): Boolean = tp.args match case arg :: Nil => - natValue(arg) match { + natValue(arg) match case Some(n) if n != Int.MaxValue => val succ = ConstantType(Constant(n + 1)) if (fromBelow) recur(other, succ) else recur(succ, other) case none => - natValue(other) match { + natValue(other) match case Some(n) if n > 0 => val pred = ConstantType(Constant(n - 1)) if (fromBelow) recur(pred, arg) else recur(arg, pred) case none => false - } - } case _ => false - } /** Compare `tp` of form `tycon[...args]`, where `tycon` is a scala.compiletime type, * with `other` via ">:>" if fromBelow is true, "<:<" otherwise. * Delegates to compareS if `tycon` is scala.compiletime.S. Otherwise, constant folds if possible. */ - def compareCompiletimeAppliedType(tp: AppliedType, other: Type, fromBelow: Boolean): Boolean = { + def compareCompiletimeAppliedType(tp: AppliedType, other: Type, fromBelow: Boolean): Boolean = if (defn.isCompiletime_S(tp.tycon.typeSymbol)) compareS(tp, other, fromBelow) - else { + else val folded = tp.tryCompiletimeConstantFold if (fromBelow) recur(other, folded) else recur(folded, other) - } - } /** Like tp1 <:< tp2, but returns false immediately if we know that * the case was covered previously during subtyping. 
@@ -1443,15 +1364,13 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling def isSubApproxHi(tp1: Type, tp2: Type): Boolean = tp1.eq(tp2) || tp2.ne(NothingType) && isSubType(tp1, tp2, approx.addHigh) - def tryLiftedToThis1: Boolean = { + def tryLiftedToThis1: Boolean = val tp1a = liftToThis(tp1) (tp1a ne tp1) && recur(tp1a, tp2) - } - def tryLiftedToThis2: Boolean = { + def tryLiftedToThis2: Boolean = val tp2a = liftToThis(tp2) (tp2a ne tp2) && recur(tp1, tp2a) - } // begin recur if tp2 eq NoType then false @@ -1480,7 +1399,6 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling restore() successCount = savedSuccessCount throw ex - } private def nonExprBaseType(tp: Type, cls: Symbol)(using Context): Type = if tp.isInstanceOf[ExprType] then NoType @@ -1491,7 +1409,7 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling * Note: It would be legal to do the lifting also if M does not contain opaque types, * but in this case the retries in tryLiftedToThis would be redundant. 
*/ - private def liftToThis(tp: Type): Type = { + private def liftToThis(tp: Type): Type = def findEnclosingThis(moduleClass: Symbol, from: Symbol): Type = if ((from.owner eq moduleClass) && from.isPackageObject && from.is(Opaque)) from.thisType @@ -1500,7 +1418,7 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling else if (from eq NoSymbol) tp else findEnclosingThis(moduleClass, from.owner) - tp match { + tp match case tp: TermRef if tp.symbol.is(Module) => findEnclosingThis(tp.symbol.moduleClass, ctx.owner) case tp: TypeRef => @@ -1518,25 +1436,20 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling if (parent1 ne tp.parent) tp.derivedAnnotatedType(parent1, tp.annot) else tp case _ => tp - } - } /** Optionally, the `n` such that `tp <:< ConstantType(Constant(n: Int))` */ - def natValue(tp: Type): Option[Int] = constValue(tp) match { + def natValue(tp: Type): Option[Int] = constValue(tp) match case Some(Constant(n: Int)) if n >= 0 => Some(n) case _ => None - } /** Optionally, the constant `c` such that `tp <:< ConstantType(c)` */ - def constValue(tp: Type): Option[Constant] = { + def constValue(tp: Type): Option[Constant] = val ct = new AnyConstantType if (isSubTypeWhenFrozen(tp, ct)) - ct.tpe match { + ct.tpe match case ConstantType(c) => Some(c) case _ => None - } else None - } /** If both `tp1` and `tp2` have atoms information, compare the atoms * in a Some, otherwise None. 
@@ -1598,7 +1511,7 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling * @param tp1 The applied type containing `args1` * @param tparams2 The type parameters of the type constructor applied to `args2` */ - def isSubArgs(args1: List[Type], args2: List[Type], tp1: Type, tparams2: List[ParamInfo]): Boolean = { + def isSubArgs(args1: List[Type], args2: List[Type], tp1: Type, tparams2: List[ParamInfo]): Boolean = /** The bounds of parameter `tparam`, where all references to type paramneters * are replaced by corresponding arguments (or their approximations in the case of * wildcard arguments). @@ -1637,7 +1550,7 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling * skolemizations, which are more expensive . And, besides, capture conversion on * paths is less intrusive than skolemization. */ - def compareCaptured(arg1: TypeBounds, arg2: Type) = tparam match { + def compareCaptured(arg1: TypeBounds, arg2: Type) = tparam match case tparam: Symbol => val leftr = leftRoot.nn if (leftr.isStable || ctx.isAfterTyper || ctx.mode.is(Mode.TypevarsMissContext)) @@ -1658,18 +1571,15 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling false case _ => false - } def isSubArg(arg1: Type, arg2: Type): Boolean = arg2 match case arg2: TypeBounds => - val arg1norm = arg1 match { + val arg1norm = arg1 match case arg1: TypeBounds => - tparam match { + tparam match case tparam: Symbol => arg1 & paramBounds(tparam) case _ => arg1 // This case can only arise when a hk-type is illegally instantiated with a wildcard - } case _ => arg1 - } arg2.contains(arg1norm) case ExprType(arg2res) if ctx.phaseId > elimByNamePhase.id && !ctx.erasedTypes @@ -1701,16 +1611,15 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling } && recurArgs(args1.tail, args2.tail, tparams2.tail) recurArgs(args1, args2, tparams2) - } /** Test whether `tp1` has a base type of the form `B[T1, ..., Tn]` 
where * - `B` derives from one of the class symbols of `tp2`, * - the type parameters of `B` match one-by-one the variances of `tparams`, * - `B` satisfies predicate `p`. */ - private def testLifted(tp1: Type, tp2: Type, tparams: List[TypeParamInfo], p: Type => Boolean): Boolean = { + private def testLifted(tp1: Type, tp2: Type, tparams: List[TypeParamInfo], p: Type => Boolean): Boolean = val classBounds = tp2.classSymbols - def recur(bcs: List[ClassSymbol]): Boolean = bcs match { + def recur(bcs: List[ClassSymbol]): Boolean = bcs match case bc :: bcs1 => (classBounds.exists(bc.derivesFrom) && variancesConform(bc.typeParams, tparams) && @@ -1719,15 +1628,13 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling recur(bcs1)) case nil => false - } recur(tp1.baseClasses) - } /** Replace any top-level recursive type `{ z => T }` in `tp` with * `[z := anchor]T`. */ - private def fixRecs(anchor: SingletonType, tp: Type): Type = { - def fix(tp: Type): Type = tp.stripTypeVar match { + private def fixRecs(anchor: SingletonType, tp: Type): Type = + def fix(tp: Type): Type = tp.stripTypeVar match case tp: RecType => fix(tp.parent).substRecThis(tp, anchor) case tp @ RefinedType(parent, rname, rinfo) => tp.derivedRefinedType(fix(parent), rname, rinfo) case tp: TypeParamRef => fixOrElse(bounds(tp).hi, tp) @@ -1735,13 +1642,10 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling case tp: AndType => tp.derivedAndType(fix(tp.tp1), fix(tp.tp2)) case tp: OrType => tp.derivedOrType (fix(tp.tp1), fix(tp.tp2)) case tp => tp - } - def fixOrElse(tp: Type, fallback: Type) = { + def fixOrElse(tp: Type, fallback: Type) = val tp1 = fix(tp) if (tp1 ne tp) tp1 else fallback - } fix(tp) - } /** Returns true iff the result of evaluating either `op1` or `op2` is true and approximates resulting constraints. 
* @@ -1925,7 +1829,7 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling * rebase both itself and the member info of `tp` on a freshly created skolem type. */ def hasMatchingMember(name: Name, tp1: Type, tp2: RefinedType): Boolean = - trace(i"hasMatchingMember($tp1 . $name :? ${tp2.refinedInfo}), mbr: ${tp1.member(name).info}", subtyping) { + trace(i"hasMatchingMember($tp1 . $name :? ${tp2.refinedInfo}), mbr: ${tp1.member(name).info}", subtyping): // If the member is an abstract type and the prefix is a path, compare the member itself // instead of its bounds. This case is needed situations like: @@ -1940,17 +1844,15 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling // C[?] <: C[TV] // // where TV is a type variable. See i2397.scala for an example of the latter. - def matchAbstractTypeMember(info1: Type): Boolean = info1 match { + def matchAbstractTypeMember(info1: Type): Boolean = info1 match case TypeBounds(lo, hi) if lo ne hi => - tp2.refinedInfo match { + tp2.refinedInfo match case rinfo2: TypeBounds if tp1.isStable => val ref1 = tp1.widenExpr.select(name) isSubType(rinfo2.lo, ref1) && isSubType(ref1, rinfo2.hi) case _ => false - } case _ => false - } // An additional check for type member matching: If the refinement of the // supertype `tp2` does not refer to a member symbol defined in the parent of `tp2`. 
@@ -1996,14 +1898,12 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling tp1.member(name) match // inlined hasAltWith for performance case mbr: SingleDenotation => qualifies(mbr) case mbr => mbr hasAltWith qualifies - } - final def ensureStableSingleton(tp: Type): SingletonType = tp.stripTypeVar match { + final def ensureStableSingleton(tp: Type): SingletonType = tp.stripTypeVar match case tp: SingletonType if tp.isStable => tp case tp: ValueType => SkolemType(tp) case tp: TypeProxy => ensureStableSingleton(tp.superType) case tp => assert(ctx.reporter.errorsReported); SkolemType(tp) - } /** Skip refinements in `tp2` which match corresponding refinements in `tp1`. * "Match" means: @@ -2013,14 +1913,12 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling * - neither refinement refers back to the refined type via a refined this. * @return The parent type of `tp2` after skipping the matching refinements. */ - private def skipMatching(tp1: Type, tp2: RefinedType): Type = tp1 match { + private def skipMatching(tp1: Type, tp2: RefinedType): Type = tp1 match case tp1 @ RefinedType(parent1, name1, rinfo1: TypeAlias) if name1 == tp2.refinedName => - tp2.parent match { + tp2.parent match case parent2: RefinedType => skipMatching(parent1, parent2) case parent2 => parent2 - } case _ => tp2 - } /** Are refinements in `tp1` pairwise subtypes of the refinements of `tp2` * up to parent type `limit`? @@ -2040,42 +1938,38 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling * is some combination of TypeRefs that point to classes, where the * combiners are AppliedTypes, RefinedTypes, RecTypes, And/Or-Types or AnnotatedTypes. 
*/ - private def isCovered(tp: Type): Boolean = tp.dealiasKeepRefiningAnnots.stripTypeVar match { + private def isCovered(tp: Type): Boolean = tp.dealiasKeepRefiningAnnots.stripTypeVar match case tp: TypeRef => tp.symbol.isClass && tp.symbol != NothingClass && tp.symbol != NullClass case tp: AppliedType => isCovered(tp.tycon) case tp: RefinedOrRecType => isCovered(tp.parent) case tp: AndType => isCovered(tp.tp1) && isCovered(tp.tp2) case tp: OrType => isCovered(tp.tp1) && isCovered(tp.tp2) case _ => false - } /** Defer constraining type variables when compared against prototypes */ - def isMatchedByProto(proto: ProtoType, tp: Type): Boolean = tp.stripTypeVar match { + def isMatchedByProto(proto: ProtoType, tp: Type): Boolean = tp.stripTypeVar match case tp: TypeParamRef if constraint contains tp => true case _ => proto.isMatchedBy(tp, keepConstraint = true) - } /** Narrow gadt.bounds for the type parameter referenced by `tr` to include * `bound` as an upper or lower bound (which depends on `isUpper`). * Test that the resulting bounds are still satisfiable. */ - private def narrowGADTBounds(tr: NamedType, bound: Type, approx: ApproxState, isUpper: Boolean): Boolean = { + private def narrowGADTBounds(tr: NamedType, bound: Type, approx: ApproxState, isUpper: Boolean): Boolean = val boundImprecise = approx.high || approx.low - ctx.mode.is(Mode.GadtConstraintInference) && !frozenGadt && !frozenConstraint && !boundImprecise && { + ctx.mode.is(Mode.GadtConstraintInference) && !frozenGadt && !frozenConstraint && !boundImprecise `&&`: val tparam = tr.symbol gadts.println(i"narrow gadt bound of $tparam: ${tparam.info} from ${if (isUpper) "above" else "below"} to $bound ${bound.toString} ${bound.isRef(tparam)}") if (bound.isRef(tparam)) false else ctx.gadtState.rollbackGadtUnless(gadtAddBound(tparam, bound, isUpper)) - } - } // Tests around `matches` /** A function implementing `tp1` matches `tp2`. 
*/ - final def matchesType(tp1: Type, tp2: Type, relaxed: Boolean): Boolean = tp1.widen match { + final def matchesType(tp1: Type, tp2: Type, relaxed: Boolean): Boolean = tp1.widen match case tp1: MethodType => - tp2.widen match { + tp2.widen match case tp2: MethodType => // implicitness is ignored when matching matchingMethodParams(tp1, tp2) && @@ -2083,17 +1977,15 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling case tp2 => relaxed && tp1.paramNames.isEmpty && matchesType(tp1.resultType, tp2, relaxed) - } case tp1: PolyType => - tp2.widen match { + tp2.widen match case tp2: PolyType => tp1.paramNames.hasSameLengthAs(tp2.paramNames) && matchesType(tp1.resultType, tp2.resultType.subst(tp2, tp1), relaxed) case _ => false - } case _ => - tp2.widen match { + tp2.widen match case _: PolyType => false case tp2: MethodType => @@ -2101,18 +1993,16 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling matchesType(tp1, tp2.resultType, relaxed) case tp2 => relaxed || isSameType(tp1, tp2) - } - } /** Do the parameter types of `tp1` and `tp2` match in a way that allows `tp1` * to override `tp2` ? Two modes: precise or not. * If `precise` is set (which is the default) this is the case if they're pairwise `=:=`. * Otherwise parameters in `tp2` must be subtypes of corresponding parameters in `tp1`. 
*/ - def matchingMethodParams(tp1: MethodType, tp2: MethodType, precise: Boolean = true): Boolean = { - def loop(formals1: List[Type], formals2: List[Type]): Boolean = formals1 match { + def matchingMethodParams(tp1: MethodType, tp2: MethodType, precise: Boolean = true): Boolean = + def loop(formals1: List[Type], formals2: List[Type]): Boolean = formals1 match case formal1 :: rest1 => - formals2 match { + formals2 match case formal2 :: rest2 => val formal2a = if (tp2.isParamDependent) formal2.subst(tp2, tp1) else formal2 val paramsMatch = @@ -2126,35 +2016,29 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling paramsMatch && loop(rest1, rest2) case nil => false - } case nil => formals2.isEmpty - } // If methods have erased parameters, then the erased parameters must match val erasedValid = (!tp1.hasErasedParams && !tp2.hasErasedParams) || (tp1.erasedParams == tp2.erasedParams) erasedValid && loop(tp1.paramInfos, tp2.paramInfos) - } /** Do the parameter types of `tp1` and `tp2` match in a way that allows `tp1` * to override `tp2` ? This is the case if they're pairwise >:>. 
*/ - def matchingPolyParams(tp1: PolyType, tp2: PolyType): Boolean = { - def loop(formals1: List[Type], formals2: List[Type]): Boolean = formals1 match { + def matchingPolyParams(tp1: PolyType, tp2: PolyType): Boolean = + def loop(formals1: List[Type], formals2: List[Type]): Boolean = formals1 match case formal1 :: rest1 => - formals2 match { + formals2 match case formal2 :: rest2 => val formal2a = formal2.subst(tp2, tp1) isSubTypeWhenFrozen(formal2a, formal1) && loop(rest1, rest2) case nil => false - } case nil => formals2.isEmpty - } loop(tp1.paramInfos, tp2.paramInfos) - } // Type equality =:= @@ -2186,20 +2070,17 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling /** Same as `isSameType` but also can be applied to overloaded TermRefs, where * two overloaded refs are the same if they have pairwise equal alternatives */ - def isSameRef(tp1: Type, tp2: Type): Boolean = trace(s"isSameRef($tp1, $tp2") { - def isSubRef(tp1: Type, tp2: Type): Boolean = tp1 match { + def isSameRef(tp1: Type, tp2: Type): Boolean = trace(s"isSameRef($tp1, $tp2"): + def isSubRef(tp1: Type, tp2: Type): Boolean = tp1 match case tp1: TermRef if tp1.isOverloaded => tp1.alternatives forall (isSubRef(_, tp2)) case _ => - tp2 match { + tp2 match case tp2: TermRef if tp2.isOverloaded => tp2.alternatives exists (isSubRef(tp1, _)) case _ => isSubType(tp1, tp2) - } - } isSubRef(tp1, tp2) && isSubRef(tp2, tp1) - } /** If the range `tp1..tp2` consist of a single type, that type, otherwise NoType`. * This is the case if `tp1 =:= tp2`, but also if `tp1 <:< tp2`, `tp1` is a singleton type, @@ -2212,17 +2093,16 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling * * All consist of the single type `"name".type`. 
*/ - def singletonInterval(tp1: Type, tp2: Type): Type = { + def singletonInterval(tp1: Type, tp2: Type): Type = def isSingletonBounds(lo: Type, hi: Type) = lo.isSingleton && hi.derivesFrom(defn.SingletonClass) && isSubTypeWhenFrozen(lo, hi) if (isSameTypeWhenFrozen(tp1, tp2)) tp1 else if (isSingletonBounds(tp1, tp2)) tp1 else if (isSingletonBounds(tp2, tp1)) tp2 else NoType - } /** The greatest lower bound of two types */ - def glb(tp1: Type, tp2: Type): Type = /*>|>*/ trace(s"glb(${tp1.show}, ${tp2.show})", subtyping, show = true) /*<|<*/ { + def glb(tp1: Type, tp2: Type): Type = /*>|>*/ trace(s"glb(${tp1.show}, ${tp2.show})", subtyping, show = true): /*<|<*/ if (tp1 eq tp2) tp1 else if !tp1.exists || (tp1 eq WildcardType) then tp2 else if !tp2.exists || (tp2 eq WildcardType) then tp1 @@ -2262,7 +2142,6 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling NothingType case _ => andType(tp1, tp2) case _ => andType(tp1, tp2) - } def widenInUnions(using Context): Boolean = migrateTo3 || ctx.erasedTypes @@ -2272,14 +2151,14 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling * @param isSoft If the lub is a union, this determines whether it's a soft union. * @note We do not admit singleton types in or-types as lubs. 
*/ - def lub(tp1: Type, tp2: Type, canConstrain: Boolean = false, isSoft: Boolean = true): Type = /*>|>*/ trace(s"lub(${tp1.show}, ${tp2.show}, canConstrain=$canConstrain, isSoft=$isSoft)", subtyping, show = true) /*<|<*/ { + def lub(tp1: Type, tp2: Type, canConstrain: Boolean = false, isSoft: Boolean = true): Type = /*>|>*/ trace(s"lub(${tp1.show}, ${tp2.show}, canConstrain=$canConstrain, isSoft=$isSoft)", subtyping, show = true): /*<|<*/ if (tp1 eq tp2) tp1 else if !tp1.exists || (tp2 eq WildcardType) then tp1 else if !tp2.exists || (tp1 eq WildcardType) then tp2 else if tp1.isAny && !tp2.isLambdaSub || tp1.isAnyKind || isBottom(tp2) then tp1 else if tp2.isAny && !tp1.isLambdaSub || tp2.isAnyKind || isBottom(tp1) then tp2 else - def mergedLub(tp1: Type, tp2: Type): Type = { + def mergedLub(tp1: Type, tp2: Type): Type = tp1.atoms match case Atoms.Range(lo1, hi1) if !widenInUnions => tp2.atoms match @@ -2300,9 +2179,7 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling val tp2w = widen(tp2) if ((tp1 ne tp1w) || (tp2 ne tp2w)) lub(tp1w, tp2w, canConstrain = canConstrain, isSoft = isSoft) else orType(tp1w, tp2w, isSoft = isSoft) // no need to check subtypes again - } mergedLub(tp1.stripLazyRef, tp2.stripLazyRef) - } /** Try to produce joint arguments for a lub `A[T_1, ..., T_n] | A[T_1', ..., T_n']` using * the following strategies: @@ -2312,7 +2189,7 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling * - otherwise a TypeBounds containing both arguments */ def lubArgs(args1: List[Type], args2: List[Type], tparams: List[TypeParamInfo], canConstrain: Boolean = false): List[Type] = - tparams match { + tparams match case tparam :: tparamsRest => val arg1 :: args1Rest = args1: @unchecked val arg2 :: args2Rest = args2: @unchecked @@ -2327,7 +2204,6 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling lubArg :: lubArgs(args1Rest, args2Rest, tparamsRest, canConstrain) case nil => 
Nil - } /** Try to produce joint arguments for a glb `A[T_1, ..., T_n] & A[T_1', ..., T_n']` using * the following strategies: @@ -2344,7 +2220,7 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling * Therefore it is subject to Config option `alignArgsInAnd`. */ def glbArgs(args1: List[Type], args2: List[Type], tparams: List[TypeParamInfo]): List[Type] = - tparams match { + tparams match case tparam :: tparamsRest => val arg1 :: args1Rest = args1: @unchecked val arg2 :: args2Rest = args2: @unchecked @@ -2362,7 +2238,6 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling glbArg :: glbArgs(args1Rest, args2Rest, tparamsRest) case nil => Nil - } private def recombineAnd(tp: AndType, tp1: Type, tp2: Type) = if (!tp1.exists) tp2 @@ -2373,57 +2248,52 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling */ private def dropIfSuper(tp: Type, sub: Type): Type = if (isSubTypeWhenFrozen(sub, tp)) NoType - else tp match { + else tp match case tp @ AndType(tp1, tp2) => recombineAnd(tp, dropIfSuper(tp1, sub), dropIfSuper(tp2, sub)) case _ => tp - } /** Merge `t1` into `tp2` if t1 is a subtype of some &-summand of tp2. */ private def mergeIfSub(tp1: Type, tp2: Type): Type = if (isSubTypeWhenFrozen(tp1, tp2)) tp1 - else tp2 match { + else tp2 match case tp2 @ AndType(tp21, tp22) => val lower1 = mergeIfSub(tp1, tp21) if (lower1 eq tp21) tp2 else if (lower1.exists) lower1 & tp22 - else { + else val lower2 = mergeIfSub(tp1, tp22) if (lower2 eq tp22) tp2 else if (lower2.exists) tp21 & lower2 else NoType - } case _ => NoType - } /** Merge `tp1` into `tp2` if tp1 is a supertype of some |-summand of tp2. * @param canConstrain If true, new constraints might be added to make the merge possible. 
*/ private def mergeIfSuper(tp1: Type, tp2: Type, canConstrain: Boolean): Type = if (isSubType(tp2, tp1, whenFrozen = !canConstrain)) tp1 - else tp2 match { + else tp2 match case tp2 @ OrType(tp21, tp22) => val higher1 = mergeIfSuper(tp1, tp21, canConstrain) if (higher1 eq tp21) tp2 else if (higher1.exists) lub(higher1, tp22, isSoft = tp2.isSoft) - else { + else val higher2 = mergeIfSuper(tp1, tp22, canConstrain) if (higher2 eq tp22) tp2 else if (higher2.exists) lub(tp21, higher2, isSoft = tp2.isSoft) else NoType - } case _ => NoType - } private def andTypeGen(tp1: Type, tp2: Type, op: (Type, Type) => Type, - original: (Type, Type) => Type = _ & _, isErased: Boolean = ctx.erasedTypes): Type = trace(s"andTypeGen(${tp1.show}, ${tp2.show})", subtyping, show = true) { + original: (Type, Type) => Type = _ & _, isErased: Boolean = ctx.erasedTypes): Type = trace(s"andTypeGen(${tp1.show}, ${tp2.show})", subtyping, show = true): val t1 = distributeAnd(tp1, tp2) if (t1.exists) t1 - else { + else val t2 = distributeAnd(tp2, tp1) if (t2.exists) t2 else if (isErased) erasedGlb(tp1, tp2) @@ -2437,8 +2307,6 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling // // Here, `F` is treated as bivariant in `O`. That is, only bivariant implementation // of `F` are allowed. See neg/hk-variance2s.scala test. - } - } /** Form a normalized conjunction of two types. * Note: For certain types, `&` is distributed inside the type. This holds for @@ -2464,16 +2332,14 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling * @param isErased Apply erasure semantics. If erased is true, instead of creating * an OrType, the lub will be computed using TypeCreator#erasedLub. 
*/ - final def orType(tp1: Type, tp2: Type, isSoft: Boolean = true, isErased: Boolean = ctx.erasedTypes): Type = { + final def orType(tp1: Type, tp2: Type, isSoft: Boolean = true, isErased: Boolean = ctx.erasedTypes): Type = val t1 = distributeOr(tp1, tp2, isSoft) if (t1.exists) t1 - else { + else val t2 = distributeOr(tp2, tp1, isSoft) if (t2.exists) t2 else if (isErased) erasedLub(tp1, tp2) else liftIfHK(tp1, tp2, OrType.balanced(_, _, soft = isSoft), _ | _, _ & _) - } - } /** `op(tp1, tp2)` unless `tp1` and `tp2` are type-constructors. * In the latter case, combine `tp1` and `tp2` under a type lambda like this: @@ -2481,7 +2347,7 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling * [X1, ..., Xn] -> op(tp1[X1, ..., Xn], tp2[X1, ..., Xn]) */ def liftIfHK(tp1: Type, tp2: Type, - op: (Type, Type) => Type, original: (Type, Type) => Type, combineVariance: (Variance, Variance) => Variance) = { + op: (Type, Type) => Type, original: (Type, Type) => Type, combineVariance: (Variance, Variance) => Variance) = val tparams1 = tp1.typeParams val tparams2 = tp2.typeParams def applied(tp: Type) = tp.appliedTo(tp.typeParams.map(_.paramInfoAsSeenFrom(tp))) @@ -2501,17 +2367,16 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling paramInfosExp = tl => tparams1.lazyZip(tparams2).map((tparam1, tparam2) => tl.integrate(tparams1, tparam1.paramInfoAsSeenFrom(tp1)).bounds & tl.integrate(tparams2, tparam2.paramInfoAsSeenFrom(tp2)).bounds), - resultTypeExp = tl => + resultTypeExp = tl => original(tp1.appliedTo(tl.paramRefs), tp2.appliedTo(tl.paramRefs))) else original(applied(tp1), applied(tp2)) - } /** Try to distribute `&` inside type, detect and handle conflicts * @pre !(tp1 <: tp2) && !(tp2 <:< tp1) -- these cases were handled before */ - private def distributeAnd(tp1: Type, tp2: Type): Type = tp1 match { + private def distributeAnd(tp1: Type, tp2: Type): Type = tp1 match case tp1 @ AppliedType(tycon1, args1) => - tp2 
match { + tp2 match case AppliedType(tycon2, args2) if tycon1.typeSymbol == tycon2.typeSymbol && tycon1 =:= tycon2 => val jointArgs = glbArgs(args1, args2, tycon1.typeParams) @@ -2519,12 +2384,11 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling else NoType case _ => NoType - } case tp1: RefinedType => // opportunistically merge same-named refinements // this does not change anything semantically (i.e. merging or not merging // gives =:= types), but it keeps the type smaller. - tp2 match { + tp2 match case tp2: RefinedType if tp1.refinedName == tp2.refinedName => val jointInfo = Denotations.infoMeet(tp1.refinedInfo, tp2.refinedInfo, safeIntersection = false) if jointInfo.exists then @@ -2533,16 +2397,14 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling NoType case _ => NoType - } case tp1: RecType => tp1.rebind(distributeAnd(tp1.parent, tp2)) case ExprType(rt1) => - tp2 match { + tp2 match case ExprType(rt2) => ExprType(rt1 & rt2) case _ => NoType - } case tp1: TypeVar if tp1.isInstantiated => tp1.underlying & tp2 case CapturingType(parent1, refs1) => @@ -2556,7 +2418,6 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling tp1.underlying & tp2 case _ => NoType - } /** Try to distribute `|` inside type, detect and handle conflicts * Note that, unlike for `&`, a disjunction cannot be pushed into @@ -2566,53 +2427,47 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling * * The rhs is a proper supertype of the lhs. 
*/ - private def distributeOr(tp1: Type, tp2: Type, isSoft: Boolean = true): Type = tp1 match { + private def distributeOr(tp1: Type, tp2: Type, isSoft: Boolean = true): Type = tp1 match case ExprType(rt1) => - tp2 match { + tp2 match case ExprType(rt2) => ExprType(lub(rt1, rt2, isSoft = isSoft)) case _ => NoType - } case tp1: TypeVar if tp1.isInstantiated => lub(tp1.underlying, tp2, isSoft = isSoft) case tp1: AnnotatedType if !tp1.isRefining => lub(tp1.underlying, tp2, isSoft = isSoft) case _ => NoType - } /** A comparison function to pick a winner in case of a merge conflict */ - private def isAsGood(tp1: Type, tp2: Type): Boolean = tp1 match { + private def isAsGood(tp1: Type, tp2: Type): Boolean = tp1 match case tp1: ClassInfo => - tp2 match { + tp2 match case tp2: ClassInfo => isSubTypeWhenFrozen(tp1.prefix, tp2.prefix) || (tp1.cls.owner derivesFrom tp2.cls.owner) case _ => false - } case tp1: PolyType => - tp2 match { + tp2 match case tp2: PolyType => tp1.typeParams.length == tp2.typeParams.length && isAsGood(tp1.resultType, tp2.resultType.subst(tp2, tp1)) case _ => false - } case tp1: MethodType => - tp2 match { + tp2 match case tp2: MethodType => def asGoodParams(formals1: List[Type], formals2: List[Type]) = (formals2 corresponds formals1)(isSubTypeWhenFrozen) asGoodParams(tp1.paramInfos, tp2.paramInfos) && (!asGoodParams(tp2.paramInfos, tp1.paramInfos) || - isAsGood(tp1.resultType, tp2.resultType)) + isAsGood(tp1.resultType, tp2.resultType)) case _ => false - } case _ => false - } protected def subCaptures(refs1: CaptureSet, refs2: CaptureSet, frozen: Boolean)(using Context): CaptureSet.CompareResult = refs1.subCaptures(refs2, frozen) @@ -2631,28 +2486,25 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling def traceIndented[T](str: String)(op: => T): T = op private def traceInfo(tp1: Type, tp2: Type) = - s"${tp1.show} <:< ${tp2.show}" + { + s"${tp1.show} <:< ${tp2.show}" `+`: if (ctx.settings.verbose.value || 
Config.verboseExplainSubtype) s" ${tp1.getClass}, ${tp2.getClass}" + (if (frozenConstraint) " frozen" else "") + (if (ctx.mode is Mode.TypevarsMissContext) " tvars-miss-ctx" else "") else "" - } /** Show subtype goal that led to an assertion failure */ def showGoal(tp1: Type, tp2: Type)(using Context): Unit = try report.echo(i"assertion failure for ${show(tp1)} <:< ${show(tp2)}, frozen = $frozenConstraint") - def explainPoly(tp: Type) = tp match { + def explainPoly(tp: Type) = tp match case tp: TypeParamRef => report.echo(s"TypeParamRef ${tp.show} found in ${tp.binder.show}") case tp: TypeRef if tp.symbol.exists => report.echo(s"typeref ${tp.show} found in ${tp.symbol.owner.show}") case tp: TypeVar => report.echo(s"typevar ${tp.show}, origin = ${tp.origin}") case _ => report.echo(s"${tp.show} is a ${tp.getClass}") - } - if (Config.verboseExplainSubtype) { + if (Config.verboseExplainSubtype) explainPoly(tp1) explainPoly(tp2) - } catch case NonFatal(ex) => report.echo(s"assertion failure [[cannot display since $ex was thrown]]") @@ -2660,17 +2512,15 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling * and the number of "successful" subtype checks, i.e. checks * that form part of a subtype derivation tree that's ultimately successful. */ - def recordStatistics(result: Boolean, prevSuccessCount: Int): Unit = { + def recordStatistics(result: Boolean, prevSuccessCount: Int): Unit = // Stats.record(s"isSubType ${tp1.show} <:< ${tp2.show}") totalCount += 1 if (result) successCount += 1 else successCount = prevSuccessCount - if (recCount == 0) { + if (recCount == 0) Stats.record("successful subType", successCount) Stats.record("total subType", totalCount) successCount = 0 totalCount = 0 - } - } /** Does `tycon` have a field with type `tparam`? Special cased for `scala.*:` * as that type is artificially added to tuples. 
*/ @@ -2687,7 +2537,7 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling * `true` implies that we found a proof; uncertainty defaults to `false`. */ def provablyEmpty(tp: Type): Boolean = - tp.dealias match { + tp.dealias match case tp if tp.isExactlyNothing => true case AndType(tp1, tp2) => provablyDisjoint(tp1, tp2) case OrType(tp1, tp2) => provablyEmpty(tp1) && provablyEmpty(tp2) @@ -2700,7 +2550,6 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling case tp: TypeProxy => provablyEmpty(tp.underlying) case _ => false - } /** Are `tp1` and `tp2` provablyDisjoint types? * @@ -2718,7 +2567,7 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling * property that in all possible contexts, the same match type expression * is either stuck or reduces to the same case. */ - def provablyDisjoint(tp1: Type, tp2: Type)(using Context): Boolean = trace(i"provable disjoint $tp1, $tp2", matchTypes) { + def provablyDisjoint(tp1: Type, tp2: Type)(using Context): Boolean = trace(i"provable disjoint $tp1, $tp2", matchTypes): // println(s"provablyDisjoint(${tp1.show}, ${tp2.show})") def isEnumValue(ref: TermRef): Boolean = @@ -2733,16 +2582,14 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling def fullyInstantiated(tp: Type): Boolean = new TypeAccumulator[Boolean] { override def apply(x: Boolean, t: Type) = - x && { - t.dealias match { + x `&&`: + t.dealias match case tp: TypeRef if !tp.symbol.isClass => false case _: SkolemType | _: TypeVar | _: TypeParamRef | _: TypeBounds => false case _ => foldOver(x, t) - } - } }.apply(true, tp) - (tp1.dealias, tp2.dealias) match { + (tp1.dealias, tp2.dealias) match case _ if !ctx.erasedTypes && tp2.isFromJavaObject => provablyDisjoint(tp1, defn.AnyType) case _ if !ctx.erasedTypes && tp1.isFromJavaObject => @@ -2806,7 +2653,7 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling 
fullyInstantiated(tp1) && // We can only trust a "no" from `isSameType` when fullyInstantiated(tp2) // both `tp1` and `tp2` are fully instantiated. - args1.lazyZip(args2).lazyZip(tycon1.typeParams).exists { + args1.lazyZip(args2).lazyZip(tycon1.typeParams).exists: (arg1, arg2, tparam) => val v = tparam.paramVarianceSign if (v > 0) @@ -2817,7 +2664,6 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling false else invariantDisjoint(arg1, arg2, tparam) - } case (tp1: HKLambda, tp2: HKLambda) => provablyDisjoint(tp1.resType, tp2.resType) case (_: HKLambda, _) => @@ -2863,8 +2709,6 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling provablyDisjoint(tp1, tp2.superTypeNormalized) case _ => false - } - } protected def explainingTypeComparer = ExplainingTypeComparer(comparerContext) protected def trackingTypeComparer = TrackingTypeComparer(comparerContext) @@ -2883,17 +2727,15 @@ class TypeComparer(@constructorOnly initctx: Context) extends ConstraintHandling def tracked[T](op: TrackingTypeComparer => T)(using Context): T = inSubComparer(trackingTypeComparer)(op) -} -object TypeComparer { +object TypeComparer: enum CompareResult: case OK, Fail, OKwithGADTUsed /** Class for unification variables used in `natValue`. 
*/ - private class AnyConstantType extends UncachedGroundType with ValueType { + private class AnyConstantType extends UncachedGroundType with ValueType: var tpe: Type = NoType - } private[core] def show(res: Any)(using Context): String = if ctx.settings.YexplainLowlevel.value then String.valueOf(res).nn @@ -3041,7 +2883,6 @@ object TypeComparer { def tracked[T](op: TrackingTypeComparer => T)(using Context): T = comparing(_.tracked(op)) -} object TrackingTypeComparer: import printing.*, Texts.* @@ -3057,7 +2898,7 @@ object TrackingTypeComparer: case Stuck => "Stuck" case NoInstance(fails) => "NoInstance(" ~ Text(fails.map(p.toText(_) ~ p.toText(_)), ", ") ~ ")" -class TrackingTypeComparer(initctx: Context) extends TypeComparer(initctx) { +class TrackingTypeComparer(initctx: Context) extends TypeComparer(initctx): import TrackingTypeComparer.* init(initctx) @@ -3066,31 +2907,27 @@ class TrackingTypeComparer(initctx: Context) extends TypeComparer(initctx) { val footprint: mutable.Set[Type] = mutable.Set[Type]() - override def bounds(param: TypeParamRef)(using Context): TypeBounds = { + override def bounds(param: TypeParamRef)(using Context): TypeBounds = if (param.binder `ne` caseLambda) footprint += param super.bounds(param) - } - override def addOneBound(param: TypeParamRef, bound: Type, isUpper: Boolean)(using Context): Boolean = { + override def addOneBound(param: TypeParamRef, bound: Type, isUpper: Boolean)(using Context): Boolean = if (param.binder `ne` caseLambda) footprint += param super.addOneBound(param, bound, isUpper) - } - override def gadtBounds(sym: Symbol)(using Context): TypeBounds | Null = { + override def gadtBounds(sym: Symbol)(using Context): TypeBounds | Null = if (sym.exists) footprint += sym.typeRef super.gadtBounds(sym) - } override def gadtAddBound(sym: Symbol, b: Type, isUpper: Boolean): Boolean = if (sym.exists) footprint += sym.typeRef super.gadtAddBound(sym, b, isUpper) - override def typeVarInstance(tvar: TypeVar)(using Context): Type = 
{ + override def typeVarInstance(tvar: TypeVar)(using Context): Type = footprint += tvar super.typeVarInstance(tvar) - } - def matchCases(scrut: Type, cases: List[Type])(using Context): Type = { + def matchCases(scrut: Type, cases: List[Type])(using Context): Type = def paramInstances(canApprox: Boolean) = new TypeAccumulator[Array[Type]]: def apply(insts: Array[Type], t: Type) = t match @@ -3110,24 +2947,21 @@ class TrackingTypeComparer(initctx: Context) extends TypeComparer(initctx) { case _ => foldOver(insts, t) - def instantiateParams(insts: Array[Type]) = new ApproximatingTypeMap { + def instantiateParams(insts: Array[Type]) = new ApproximatingTypeMap: variance = 0 - def apply(t: Type) = t match { + def apply(t: Type) = t match case t @ TypeParamRef(b, n) if b `eq` caseLambda => insts(n) case t: LazyRef => apply(t.ref) case _ => mapOver(t) - } - } /** Match a single case. */ - def matchCase(cas: Type): MatchResult = trace(i"$scrut match ${MatchTypeTrace.caseText(cas)}", matchTypes, show = true) { - val cas1 = cas match { + def matchCase(cas: Type): MatchResult = trace(i"$scrut match ${MatchTypeTrace.caseText(cas)}", matchTypes, show = true): + val cas1 = cas match case cas: HKTypeLambda => caseLambda = constrained(cas) caseLambda.resultType case _ => cas - } val defn.MatchCase(pat, body) = cas1: @unchecked @@ -3143,11 +2977,9 @@ class TrackingTypeComparer(initctx: Context) extends TypeComparer(initctx) { val instances = paramInstances(canApprox)(Array.fill(caseLambda.paramNames.length)(NoType), pat) instantiateParams(instances)(body) match case Range(lo, hi) => - MatchResult.NoInstance { - caseLambda.paramNames.zip(instances).collect { + MatchResult.NoInstance: + caseLambda.paramNames.zip(instances).collect: case (name, Range(lo, hi)) => (name, TypeBounds(lo, hi)) - } - } case redux => MatchResult.Reduced(redux.simplified) case _ => @@ -3163,7 +2995,6 @@ class TrackingTypeComparer(initctx: Context) extends TypeComparer(initctx) { MatchResult.Disjoint else 
MatchResult.Stuck - } def recur(remaining: List[Type]): Type = remaining match case cas :: remaining1 => @@ -3182,7 +3013,7 @@ class TrackingTypeComparer(initctx: Context) extends TypeComparer(initctx) { val casesText = MatchTypeTrace.noMatchesText(scrut, cases) throw MatchTypeReductionError(em"Match type reduction $casesText") - inFrozenConstraint { + inFrozenConstraint: // Empty types break the basic assumption that if a scrutinee and a // pattern are disjoint it's OK to reduce passed that pattern. Indeed, // empty types viewed as a set of value is always a subset of any other @@ -3201,12 +3032,9 @@ class TrackingTypeComparer(initctx: Context) extends TypeComparer(initctx) { NoType else recur(cases) - } - } -} /** A type comparer that can record traces of subtype operations */ -class ExplainingTypeComparer(initctx: Context) extends TypeComparer(initctx) { +class ExplainingTypeComparer(initctx: Context) extends TypeComparer(initctx): import TypeComparer._ init(initctx) @@ -3220,7 +3048,7 @@ class ExplainingTypeComparer(initctx: Context) extends TypeComparer(initctx) { override def traceIndented[T](str: String)(op: => T): T = if (skipped) op - else { + else indent += 2 val str1 = str.replace('\n', ' ') b.append("\n").append(" " * indent).append("==> ").append(str1) @@ -3228,7 +3056,6 @@ class ExplainingTypeComparer(initctx: Context) extends TypeComparer(initctx) { b.append("\n").append(" " * indent).append("<== ").append(str1).append(" = ").append(show(res)) indent -= 2 res - } private def frozenNotice: String = if frozenConstraint then " in frozen constraint" else "" @@ -3239,39 +3066,31 @@ class ExplainingTypeComparer(initctx: Context) extends TypeComparer(initctx) { then s" ${tp1.getClass} ${tp2.getClass}" else "" val approx = approxState - traceIndented(s"${show(tp1)} <: ${show(tp2)}$moreInfo${approx.show}$frozenNotice") { + traceIndented(s"${show(tp1)} <: ${show(tp2)}$moreInfo${approx.show}$frozenNotice"): super.recur(tp1, tp2) - } override def 
hasMatchingMember(name: Name, tp1: Type, tp2: RefinedType): Boolean = - traceIndented(s"hasMatchingMember(${show(tp1)} . $name, ${show(tp2.refinedInfo)}), member = ${show(tp1.member(name).info)}") { + traceIndented(s"hasMatchingMember(${show(tp1)} . $name, ${show(tp2.refinedInfo)}), member = ${show(tp1.member(name).info)}"): super.hasMatchingMember(name, tp1, tp2) - } override def lub(tp1: Type, tp2: Type, canConstrain: Boolean, isSoft: Boolean): Type = - traceIndented(s"lub(${show(tp1)}, ${show(tp2)}, canConstrain=$canConstrain, isSoft=$isSoft)") { + traceIndented(s"lub(${show(tp1)}, ${show(tp2)}, canConstrain=$canConstrain, isSoft=$isSoft)"): super.lub(tp1, tp2, canConstrain, isSoft) - } override def glb(tp1: Type, tp2: Type): Type = - traceIndented(s"glb(${show(tp1)}, ${show(tp2)})") { + traceIndented(s"glb(${show(tp1)}, ${show(tp2)})"): super.glb(tp1, tp2) - } override def addConstraint(param: TypeParamRef, bound: Type, fromBelow: Boolean)(using Context): Boolean = - traceIndented(s"add constraint ${show(param)} ${if (fromBelow) ">:" else "<:"} ${show(bound)} $frozenNotice, constraint = ${show(ctx.typerState.constraint)}") { + traceIndented(s"add constraint ${show(param)} ${if (fromBelow) ">:" else "<:"} ${show(bound)} $frozenNotice, constraint = ${show(ctx.typerState.constraint)}"): super.addConstraint(param, bound, fromBelow) - } override def gadtAddBound(sym: Symbol, b: Type, isUpper: Boolean): Boolean = - traceIndented(s"add GADT constraint ${show(sym)} ${if isUpper then "<:" else ">:"} ${show(b)} $frozenNotice, GADT constraint = ${show(ctx.gadt)}") { + traceIndented(s"add GADT constraint ${show(sym)} ${if isUpper then "<:" else ">:"} ${show(b)} $frozenNotice, GADT constraint = ${show(ctx.gadt)}"): super.gadtAddBound(sym, b, isUpper) - } override def subCaptures(refs1: CaptureSet, refs2: CaptureSet, frozen: Boolean)(using Context): CaptureSet.CompareResult = - traceIndented(i"subcaptures $refs1 <:< $refs2 ${if frozen then "frozen" else ""}") { + 
traceIndented(i"subcaptures $refs1 <:< $refs2 ${if frozen then "frozen" else ""}"): super.subCaptures(refs1, refs2, frozen) - } def lastTrace(header: String): String = header + { try b.toString finally b.clear() } -} diff --git a/compiler/src/dotty/tools/dotc/core/TypeErasure.scala b/compiler/src/dotty/tools/dotc/core/TypeErasure.scala index 50e7e1847adf..3132cf9889a9 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeErasure.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeErasure.scala @@ -69,27 +69,25 @@ end SourceLanguage * only for isInstanceOf, asInstanceOf: PolyType, TypeParamRef, TypeBounds * */ -object TypeErasure { +object TypeErasure: private def erasureDependsOnArgs(sym: Symbol)(using Context) = sym == defn.ArrayClass || sym == defn.PairClass || isDerivedValueClass(sym) - def normalizeClass(cls: ClassSymbol)(using Context): ClassSymbol = { - if (cls.owner == defn.ScalaPackageClass) { + def normalizeClass(cls: ClassSymbol)(using Context): ClassSymbol = + if (cls.owner == defn.ScalaPackageClass) if (defn.specialErasure.contains(cls)) return defn.specialErasure(cls).uncheckedNN if (cls == defn.UnitClass) return defn.BoxedUnitClass - } cls - } /** A predicate that tests whether a type is a legal erased type. Only asInstanceOf and * isInstanceOf may have types that do not satisfy the predicate. * ErasedValueType is considered an erased type because it is valid after Erasure (it is * eliminated by ElimErasedValueType). */ - def isErasedType(tp: Type)(using Context): Boolean = tp match { + def isErasedType(tp: Type)(using Context): Boolean = tp match case _: ErasedValueType => true case tp: TypeRef => @@ -114,7 +112,6 @@ object TypeErasure { true case _ => false - } /** A type representing the semi-erasure of a derived value class, see SIP-15 * where it's called "C$unboxed" for a class C. 
@@ -129,19 +126,16 @@ object TypeErasure { * @param erasedUnderlying The erased type of the single field of the value class */ abstract case class ErasedValueType(tycon: TypeRef, erasedUnderlying: Type) - extends CachedGroundType with ValueType { + extends CachedGroundType with ValueType: override def computeHash(bs: Hashable.Binders): Int = doHash(bs, tycon, erasedUnderlying) - } final class CachedErasedValueType(tycon: TypeRef, erasedUnderlying: Type) extends ErasedValueType(tycon, erasedUnderlying) - object ErasedValueType { - def apply(tycon: TypeRef, erasedUnderlying: Type)(using Context): ErasedValueType = { + object ErasedValueType: + def apply(tycon: TypeRef, erasedUnderlying: Type)(using Context): ErasedValueType = assert(erasedUnderlying.exists) unique(new CachedErasedValueType(tycon, erasedUnderlying)) - } - } private def erasureIdx(sourceLanguage: SourceLanguage, semiEraseVCs: Boolean, isConstructor: Boolean, isSymbol: Boolean, wildcardOK: Boolean) = extension (b: Boolean) def toInt = if b then 1 else 0 @@ -198,16 +192,15 @@ object TypeErasure { case ErasedValueType(_, underlying) => erasure(underlying) case etp => etp - def sigName(tp: Type, sourceLanguage: SourceLanguage)(using Context): TypeName = { + def sigName(tp: Type, sourceLanguage: SourceLanguage)(using Context): TypeName = val normTp = tp.translateFromRepeated(toArray = sourceLanguage.isJava) val erase = erasureFn(sourceLanguage, semiEraseVCs = !sourceLanguage.isJava, isConstructor = false, isSymbol = false, wildcardOK = true) erase.sigName(normTp)(using preErasureCtx) - } /** The erasure of a top-level reference. Differs from normal erasure in that * TermRefs are kept instead of being widened away. 
*/ - def erasedRef(tp: Type)(using Context): Type = tp match { + def erasedRef(tp: Type)(using Context): Type = tp match case tp: TermRef => assert(tp.symbol.exists, tp) val tp1 = makePackageObjPrefixExplicit(tp) @@ -217,7 +210,6 @@ object TypeErasure { tp case tp => valueErasure(tp) - } /** The symbol's erased info. This is the type's erasure, except for the following symbols: * @@ -227,7 +219,7 @@ object TypeErasure { * * `sourceLanguage`, `isConstructor` and `semiEraseVCs` are set based on the symbol. */ - def transformInfo(sym: Symbol, tp: Type)(using Context): Type = { + def transformInfo(sym: Symbol, tp: Type)(using Context): Type = val sourceLanguage = SourceLanguage(sym) val semiEraseVCs = !sourceLanguage.isJava // Java sees our value classes as regular classes. val erase = erasureFn(sourceLanguage, semiEraseVCs, sym.isConstructor, isSymbol = true, wildcardOK = false) @@ -241,7 +233,7 @@ object TypeErasure { else if sym.is(ConstructorProxy) then NoType else if (sym.isConstructor) outer.addParam(sym.owner.asClass, erase(tp)(using preErasureCtx)) else if (sym.is(Label)) erase.eraseResult(sym.info)(using preErasureCtx) - else erase.eraseInfo(tp, sym)(using preErasureCtx) match { + else erase.eraseInfo(tp, sym)(using preErasureCtx) match case einfo: MethodType => if (sym.isGetter && einfo.resultType.isRef(defn.UnitClass)) MethodType(Nil, defn.BoxedUnitClass.typeRef) @@ -267,8 +259,6 @@ object TypeErasure { defn.ObjectType else einfo - } - } /** Is `Array[tp]` a generic Array that needs to be erased to `Object`? * This is true if among the subtypes of `Array[tp]` there is either: @@ -281,7 +271,7 @@ object TypeErasure { * also return true for element types upper-bounded by a non-reference type * such as in `Array[_ <: Int]` or `Array[_ <: UniversalTrait]`. 
*/ - def isGenericArrayElement(tp: Type, isScala2: Boolean)(using Context): Boolean = { + def isGenericArrayElement(tp: Type, isScala2: Boolean)(using Context): Boolean = /** A symbol that represents the sort of JVM array that values of type `t` can be stored in: * - If we can always store such values in a reference array, return Object * - If we can always store them in a specific primitive array, return the @@ -317,7 +307,7 @@ object TypeErasure { /** Can one of the JVM Array type store all possible values of type `t`? */ def fitsInJVMArray(t: Type): Boolean = arrayUpperBound(t).exists - tp.dealias match { + tp.dealias match case tp: TypeRef if !tp.symbol.isOpaqueAlias => !tp.symbol.isClass && !tp.symbol.is(JavaDefined) && // In Java code, Array[T] can never erase to Object @@ -338,18 +328,15 @@ object TypeErasure { case tp: OrType => isGenericArrayElement(tp.tp1, isScala2) || isGenericArrayElement(tp.tp2, isScala2) case _ => false - } - } /** Is `tp` an abstract type or polymorphic type parameter, or another unbounded generic type? */ - def isGeneric(tp: Type)(using Context): Boolean = tp.dealias match { + def isGeneric(tp: Type)(using Context): Boolean = tp.dealias match case tp: TypeRef if !tp.symbol.isOpaqueAlias => !tp.symbol.isClass case tp: TypeParamRef => true case tp: TypeProxy => isGeneric(tp.translucentSuperType) case tp: AndType => isGeneric(tp.tp1) || isGeneric(tp.tp2) case tp: OrType => isGeneric(tp.tp1) || isGeneric(tp.tp2) case _ => false - } /** The erased least upper bound of two erased types is computed as follows * - if both argument are arrays of objects, an array of the erased lub of the element types @@ -365,17 +352,17 @@ object TypeErasure { * The reason to pick last is that we prefer classes over traits that way, * which leads to more predictable bytecode and (?) faster dynamic dispatch. 
*/ - def erasedLub(tp1: Type, tp2: Type)(using Context): Type = { + def erasedLub(tp1: Type, tp2: Type)(using Context): Type = // We need to short-circuit the following 2 case because the regular lub logic in the else relies on // the class hierarchy, which doesn't properly capture `Nothing`/`Null` subtyping behaviour. if tp1.isRef(defn.NothingClass) || (tp1.isRef(defn.NullClass) && tp2.derivesFrom(defn.ObjectClass)) then tp2 // After erasure, Nothing | T is just T and Null | C is just C, if C is a reference type. else if tp2.isRef(defn.NothingClass) || (tp2.isRef(defn.NullClass) && tp1.derivesFrom(defn.ObjectClass)) then tp1 // After erasure, T | Nothing is just T and C | Null is just C, if C is a reference type. - else tp1 match { + else tp1 match case JavaArrayType(elem1) => import dotty.tools.dotc.transform.TypeUtils._ - tp2 match { + tp2 match case JavaArrayType(elem2) => if (elem1.isPrimitiveValueType || elem2.isPrimitiveValueType) if (elem1.classSymbol eq elem2.classSymbol) // same primitive @@ -383,22 +370,19 @@ object TypeErasure { else defn.ObjectType else JavaArrayType(erasedLub(elem1, elem2)) case _ => defn.ObjectType - } case _ => - tp2 match { + tp2 match case JavaArrayType(_) => defn.ObjectType case _ => val cls2 = tp2.classSymbol /** takeWhile+1 */ - def takeUntil[T](l: List[T])(f: T => Boolean): List[T] = { + def takeUntil[T](l: List[T])(f: T => Boolean): List[T] = @tailrec def loop(tail: List[T], acc: List[T]): List[T] = - tail match { + tail match case h :: t => loop(if (f(h)) t else Nil, h :: acc) case Nil => acc.reverse - } loop(l, Nil) - } // We are not interested in anything that is not a supertype of tp2 val tp2superclasses = tp1.baseClasses.filter(cls2.derivesFrom) @@ -417,13 +401,9 @@ object TypeErasure { } // Pick the last minimum to prioritise classes over traits - minimums.lastOption match { + minimums.lastOption match case Some(lub) => valueErasure(lub.typeRef) case _ => defn.ObjectType - } - } - } - } /** The erased greatest lower 
bound of two erased type picks one of the two argument types. * @@ -513,13 +493,12 @@ object TypeErasure { /** Does the (possibly generic) type `tp` have the same erasure in all its * possible instantiations? */ - def hasStableErasure(tp: Type)(using Context): Boolean = tp match { + def hasStableErasure(tp: Type)(using Context): Boolean = tp match case tp: TypeRef if !tp.symbol.isOpaqueAlias => - tp.info match { + tp.info match case TypeAlias(alias) => hasStableErasure(alias) case _: ClassInfo => true case _ => false - } case _: TypeParamRef => false case _: TypeBounds => false case _: MatchType => false @@ -527,7 +506,6 @@ object TypeErasure { case tp: AndType => hasStableErasure(tp.tp1) && hasStableErasure(tp.tp2) case tp: OrType => hasStableErasure(tp.tp1) && hasStableErasure(tp.tp2) case _ => false - } /** The erasure of `PolyFunction { def apply: $applyInfo }` */ def erasePolyFunctionApply(applyInfo: Type)(using Context): Type = @@ -544,7 +522,6 @@ object TypeErasure { isContextual = erasedFn.isContextualMethod, ) erasure(fnType) -} import TypeErasure._ @@ -559,7 +536,7 @@ import TypeErasure._ * @param wildcardOK Wildcards are acceptable (true when using the erasure * for computing a signature name). */ -class TypeErasure(sourceLanguage: SourceLanguage, semiEraseVCs: Boolean, isConstructor: Boolean, isSymbol: Boolean, wildcardOK: Boolean) { +class TypeErasure(sourceLanguage: SourceLanguage, semiEraseVCs: Boolean, isConstructor: Boolean, isSymbol: Boolean, wildcardOK: Boolean): /** The erasure |T| of a type T. This is: * @@ -594,7 +571,7 @@ class TypeErasure(sourceLanguage: SourceLanguage, semiEraseVCs: Boolean, isConst * - For NoType or NoPrefix, the type itself. * - For any other type, exception. 
*/ - private def apply(tp: Type)(using Context): Type = tp match { + private def apply(tp: Type)(using Context): Type = tp match case _: ErasedValueType => tp case tp: TypeRef => @@ -656,7 +633,7 @@ class TypeErasure(sourceLanguage: SourceLanguage, semiEraseVCs: Boolean, isConst .unzip else (tp.paramNames, tp.paramInfos) val formals = formals0.mapConserve(paramErasure) - eraseResult(tp.resultType) match { + eraseResult(tp.resultType) match case rt: MethodType => tp.derivedLambdaType(names ++ rt.paramNames, formals ++ rt.paramInfos, rt.resultType) case NoType => @@ -666,25 +643,22 @@ class TypeErasure(sourceLanguage: SourceLanguage, semiEraseVCs: Boolean, isConst NoType case rt => tp.derivedLambdaType(names, formals, rt) - } case tp: PolyType => this(tp.resultType) case tp @ ClassInfo(pre, cls, parents, decls, _) => if (cls.is(Package)) tp - else { - def eraseParent(tp: Type) = tp.dealias match { // note: can't be opaque, since it's a class parent + else + def eraseParent(tp: Type) = tp.dealias match // note: can't be opaque, since it's a class parent case tp: AppliedType if tp.tycon.isRef(defn.PairClass) => defn.ObjectType case _ => apply(tp) - } val erasedParents: List[Type] = if ((cls eq defn.ObjectClass) || cls.isPrimitiveValueClass) Nil - else parents.mapConserve(eraseParent) match { + else parents.mapConserve(eraseParent) match case tr :: trs1 => assert(!tr.classSymbol.is(Trait), i"$cls has bad parents $parents%, %") val tr1 = if (cls.is(Trait)) defn.ObjectType else tr tr1 :: trs1.filterNot(_.isAnyRef) case nil => nil - } var erasedDecls = decls.filteredScope(sym => !sym.isType || sym.isClass).openForMutations for dcl <- erasedDecls.iterator do if dcl.lastKnownDenotation.unforcedAnnotation(defn.TargetNameAnnot).isDefined @@ -696,14 +670,12 @@ class TypeErasure(sourceLanguage: SourceLanguage, semiEraseVCs: Boolean, isConst val selfType1 = if cls.is(Module) then cls.sourceModule.termRef else NoType tp.derivedClassInfo(NoPrefix, erasedParents, erasedDecls, 
selfType1) // can't replace selftype by NoType because this would lose the sourceModule link - } case _: ErrorType | JavaArrayType(_) => tp case tp: WildcardType if wildcardOK => tp case tp if (tp `eq` NoType) || (tp `eq` NoPrefix) => tp - } /** Like translucentSuperType, but issue a fatal error if it does not exist. */ private def checkedSuperType(tp: TypeProxy)(using Context): Type = @@ -730,23 +702,21 @@ class TypeErasure(sourceLanguage: SourceLanguage, semiEraseVCs: Boolean, isConst case tpw @ MethodType(Nil) if tp.symbol.isGetter => tpw.resultType case tpw => tpw - private def eraseArray(tp: Type)(using Context) = { + private def eraseArray(tp: Type)(using Context) = val defn.ArrayOf(elemtp) = tp: @unchecked if (isGenericArrayElement(elemtp, isScala2 = sourceLanguage.isScala2)) defn.ObjectType else try JavaArrayType(erasureFn(sourceLanguage, semiEraseVCs = false, isConstructor, isSymbol, wildcardOK)(elemtp)) catch case ex: Throwable => handleRecursive("erase array type", tp.show, ex) - } - private def erasePair(tp: Type)(using Context): Type = { + private def erasePair(tp: Type)(using Context): Type = // NOTE: `tupleArity` does not consider TypeRef(EmptyTuple$) equivalent to EmptyTuple.type, // we fix this for printers, but type erasure should be preserved. val arity = tp.tupleArity if (arity < 0) defn.ProductClass.typeRef else if (arity <= Definitions.MaxTupleArity) defn.TupleType(arity).nn else defn.TupleXXLClass.typeRef - } /** The erasure of a symbol's info. This is different from `apply` in the way `ExprType`s and * `PolyType`s are treated. 
`eraseInfo` maps them them to method types, whereas `apply` maps them @@ -772,7 +742,7 @@ class TypeErasure(sourceLanguage: SourceLanguage, semiEraseVCs: Boolean, isConst case rt => MethodType(Nil, Nil, rt) case tp1 => this(tp1) - private def eraseDerivedValueClass(tp: Type)(using Context): Type = { + private def eraseDerivedValueClass(tp: Type)(using Context): Type = val cls = tp.classSymbol.asClass val unbox = valueClassUnbox(cls) if unbox.exists then @@ -806,12 +776,10 @@ class TypeErasure(sourceLanguage: SourceLanguage, semiEraseVCs: Boolean, isConst assert(ctx.reporter.errorsReported, i"no erasure for $underlying") NoType else NoType - } - private def eraseNormalClassRef(tref: TypeRef)(using Context): Type = { + private def eraseNormalClassRef(tref: TypeRef)(using Context): Type = val cls = tref.symbol.asClass (if (cls.owner.is(Package)) normalizeClass(cls) else cls).typeRef - } /** The erasure of a function result type. */ def eraseResult(tp: Type)(using Context): Type = @@ -836,21 +804,19 @@ class TypeErasure(sourceLanguage: SourceLanguage, semiEraseVCs: Boolean, isConst * Need to ensure correspondence with erasure! 
*/ private def sigName(tp: Type)(using Context): TypeName = try - tp match { + tp match case tp: TypeRef => if (!tp.denot.exists) // println(i"missing: ${tp.toString} ${tp.denot} / ${tp.prefix.member(tp.name)}") throw new MissingType(tp.prefix, tp.name) val sym = tp.symbol - if (!sym.isClass) { + if (!sym.isClass) val info = checkedSuperType(tp) if (!info.exists) assert(false, i"undefined: $tp with symbol $sym") return sigName(info) - } - if (semiEraseVCs && isDerivedValueClass(sym)) { + if (semiEraseVCs && isDerivedValueClass(sym)) val erasedVCRef = eraseDerivedValueClass(tp) if (erasedVCRef.exists) return sigName(erasedVCRef) - } if (defn.isSyntheticFunctionClass(sym)) sigName(defn.functionTypeErasure(sym)) else @@ -898,10 +864,7 @@ class TypeErasure(sourceLanguage: SourceLanguage, semiEraseVCs: Boolean, isConst val erasedTp = this(tp) assert(erasedTp ne tp, tp) sigName(erasedTp) - } - catch { + catch case ex: AssertionError => println(s"no sig for $tp because of ${ex.printStackTrace()}") throw ex - } -} diff --git a/compiler/src/dotty/tools/dotc/core/TypeErrors.scala b/compiler/src/dotty/tools/dotc/core/TypeErrors.scala index f59bd08da779..5fd1739bb163 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeErrors.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeErrors.scala @@ -53,11 +53,10 @@ class MalformedType(pre: Type, denot: Denotation, absMembers: Set[Name])(using C def toMessage(using Context) = em"malformed type: $pre is not a legal prefix for $denot because it contains abstract type member${if (absMembers.size == 1) "" else "s"} ${absMembers.mkString(", ")}" class MissingType(pre: Type, name: Name)(using Context) extends TypeError: - private def otherReason(pre: Type)(using Context): String = pre match { + private def otherReason(pre: Type)(using Context): String = pre match case pre: ThisType if pre.cls.givenSelfType.exists => i"\nor the self type of $pre might not contain all transitive dependencies" case _ => "" - } override def toMessage(using 
Context): Message = if ctx.debug then printStackTrace() @@ -70,20 +69,18 @@ extends TypeError: def explanation: String = s"$op $details" - private def recursions: List[RecursionOverflow] = { + private def recursions: List[RecursionOverflow] = import scala.collection.mutable.ListBuffer val result = ListBuffer.empty[RecursionOverflow] - @annotation.tailrec def loop(throwable: Throwable): List[RecursionOverflow] = throwable match { + @annotation.tailrec def loop(throwable: Throwable): List[RecursionOverflow] = throwable match case ro: RecursionOverflow => result += ro loop(ro.previous) case _ => result.toList - } loop(this) - } - def opsString(rs: List[RecursionOverflow])(using Context): String = { + def opsString(rs: List[RecursionOverflow])(using Context): String = val maxShown = 20 if (rs.lengthCompare(maxShown) > 0) i"""${opsString(rs.take(maxShown / 2))} @@ -91,7 +88,6 @@ extends TypeError: |${opsString(rs.takeRight(maxShown / 2))}""" else (rs.map(_.explanation): List[String]).mkString("\n ", "\n| ", "") - } override def toMessage(using Context): Message = val mostCommon = recursions.groupBy(_.op).toList.maxBy(_._2.map(_.weight).sum)._2.reverse @@ -153,7 +149,7 @@ class CyclicReference private (val denot: SymDenotation)(using Context) extends */ def errorMsg(cx: Context): Message = if (cx.mode is Mode.InferringReturnType) - cx.tree match { + cx.tree match case tree: untpd.ValOrDefDef if !tree.tpt.typeOpt.exists => if (inImplicitSearch) TermMemberNeedsResultTypeForImplicitSearch(cycleSym) @@ -165,7 +161,6 @@ class CyclicReference private (val denot: SymDenotation)(using Context) extends errorMsg(cx.outer) case _ => errorMsg(cx.outer) - } // Give up and give generic errors. 
else if (cycleSym.isOneOf(GivenOrImplicitVal, butNot = Method) && cycleSym.owner.isTerm) diff --git a/compiler/src/dotty/tools/dotc/core/TypeEval.scala b/compiler/src/dotty/tools/dotc/core/TypeEval.scala index b5684b07f181..20331125f9d9 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeEval.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeEval.scala @@ -121,7 +121,7 @@ object TypeEval: c <- extractorC(tp.args(2)) yield runConstantOp(op(a, b, c)) - trace(i"compiletime constant fold $tp", typr, show = true) { + trace(i"compiletime constant fold $tp", typr, show = true): val name = tycon.symbol.name val owner = tycon.symbol.owner val constantType = @@ -240,7 +240,6 @@ object TypeEval: else None constantType.getOrElse(NoType) - } case _ => NoType end tryCompiletimeConstantFold diff --git a/compiler/src/dotty/tools/dotc/core/TypeOps.scala b/compiler/src/dotty/tools/dotc/core/TypeOps.scala index 6809e4b9083c..c1e3aaf7537f 100644 --- a/compiler/src/dotty/tools/dotc/core/TypeOps.scala +++ b/compiler/src/dotty/tools/dotc/core/TypeOps.scala @@ -32,8 +32,8 @@ object TypeOps: /** The type `tp` as seen from prefix `pre` and owner `cls`. See the spec * for what this means. 
*/ - final def asSeenFrom(tp: Type, pre: Type, cls: Symbol)(using Context): Type = { - pre match { + final def asSeenFrom(tp: Type, pre: Type, cls: Symbol)(using Context): Type = + pre match case pre: QualSkolemType => // When a selection has an unstable qualifier, the qualifier type gets // wrapped in a `QualSkolemType` so that it may appear soundly as the @@ -51,13 +51,11 @@ object TypeOps: Stats.record("asSeenFrom skolem prefix required") case _ => - } new AsSeenFromMap(pre, cls).apply(tp) - } /** The TypeMap handling the asSeenFrom */ - class AsSeenFromMap(pre: Type, cls: Symbol)(using Context) extends ApproximatingTypeMap, IdempotentCaptRefMap { + class AsSeenFromMap(pre: Type, cls: Symbol)(using Context) extends ApproximatingTypeMap, IdempotentCaptRefMap: /** The number of range approximations in invariant or contravariant positions * performed by this TypeMap. @@ -67,7 +65,7 @@ object TypeOps: */ private[TypeOps] var approxCount: Int = 0 - def apply(tp: Type): Type = { + def apply(tp: Type): Type = /** Map a `C.this` type to the right prefix. If the prefix is unstable, and * the current variance is <= 0, return a range. @@ -75,10 +73,10 @@ object TypeOps: * @param cls The class in which the `C.this` type occurs * @param thiscls The prefix `C` of the `C.this` type. 
*/ - def toPrefix(pre: Type, cls: Symbol, thiscls: ClassSymbol): Type = /*>|>*/ trace.conditionally(track, s"toPrefix($pre, $cls, $thiscls)", show = true) /*<|<*/ { + def toPrefix(pre: Type, cls: Symbol, thiscls: ClassSymbol): Type = /*>|>*/ trace.conditionally(track, s"toPrefix($pre, $cls, $thiscls)", show = true): /*<|<*/ if ((pre eq NoType) || (pre eq NoPrefix) || (cls is PackageClass)) tp - else pre match { + else pre match case pre: SuperType => toPrefix(pre.thistpe, cls, thiscls) case _ => if (thiscls.derivesFrom(cls) && pre.baseType(thiscls).exists) @@ -90,13 +88,11 @@ object TypeOps: toPrefix(pre.select(nme.PACKAGE), cls, thiscls) else toPrefix(pre.baseType(cls).normalizedPrefix, cls.owner, thiscls) - } - } - trace.conditionally(track, s"asSeen ${tp.show} from (${pre.show}, ${cls.show})", show = true) { // !!! DEBUG + trace.conditionally(track, s"asSeen ${tp.show} from (${pre.show}, ${cls.show})", show = true): // !!! DEBUG // All cases except for ThisType are the same as in Map. Inlined for performance // TODO: generalize the inlining trick? - tp match { + tp match case tp: NamedType => val sym = tp.symbol if (sym.isStatic && !sym.maybeOwner.seesOpaques || (tp.prefix `eq` NoPrefix)) tp @@ -109,9 +105,6 @@ object TypeOps: tp case _ => mapOver(tp) - } - } - } override def reapply(tp: Type): Type = // derived infos have already been subjected to asSeenFrom, hence to need to apply the map again. 
@@ -121,24 +114,22 @@ object TypeOps: assert(approxCount > 0) approxCount -= 1 tp - } def isLegalPrefix(pre: Type)(using Context): Boolean = pre.isStable || !ctx.phase.isTyper /** Implementation of Types#simplified */ - def simplify(tp: Type, theMap: SimplifyMap | Null)(using Context): Type = { + def simplify(tp: Type, theMap: SimplifyMap | Null)(using Context): Type = def mapOver = (if (theMap != null) theMap else new SimplifyMap).mapOver(tp) - tp match { + tp match case tp: NamedType => if (tp.symbol.isStatic || (tp.prefix `eq` NoPrefix)) tp - else tp.derivedSelect(simplify(tp.prefix, theMap)) match { + else tp.derivedSelect(simplify(tp.prefix, theMap)) match case tp1: NamedType if tp1.denotationIsCurrent => val tp2 = tp1.reduceProjection //if (tp2 ne tp1) println(i"simplified $tp1 -> $tp2") tp2 case tp1 => tp1 - } case defn.MatchCase(pat, body) => defn.MatchCase(simplify(pat, theMap), body) case tp: AppliedType => @@ -199,12 +190,9 @@ object TypeOps: tp case _ => mapOver - } - } - class SimplifyMap(using Context) extends IdentityCaptRefMap { + class SimplifyMap(using Context) extends IdentityCaptRefMap: def apply(tp: Type): Type = simplify(tp, this) - } class SimplifyKeepUnchecked(using Context) extends SimplifyMap @@ -224,7 +212,7 @@ object TypeOps: * in a "best effort", ad-hoc way by selectively widening types in `T1, ..., Tn` * and stopping if the resulting union simplifies to a type that is not a disjunction. 
*/ - def orDominator(tp: Type)(using Context): Type = { + def orDominator(tp: Type)(using Context): Type = /** a faster version of cs1 intersect cs2 */ def intersect(cs1: List[ClassSymbol], cs2: List[ClassSymbol]): List[ClassSymbol] = @@ -240,60 +228,53 @@ object TypeOps: case _ => tp.baseClasses /** The minimal set of classes in `cs` which derive all other classes in `cs` */ - def dominators(cs: List[ClassSymbol], accu: List[ClassSymbol]): List[ClassSymbol] = (cs: @unchecked) match { + def dominators(cs: List[ClassSymbol], accu: List[ClassSymbol]): List[ClassSymbol] = (cs: @unchecked) match case c :: rest => val accu1 = if (accu exists (_ derivesFrom c)) accu else c :: accu if (cs == c.baseClasses) accu1 else dominators(rest, accu1) case Nil => // this case can happen because after erasure we do not have a top class anymore assert(ctx.erasedTypes || ctx.reporter.errorsReported) defn.ObjectClass :: Nil - } - def mergeRefinedOrApplied(tp1: Type, tp2: Type): Type = { + def mergeRefinedOrApplied(tp1: Type, tp2: Type): Type = def fail = throw new AssertionError(i"Failure to join alternatives $tp1 and $tp2") def fallback = tp2 match case AndType(tp21, tp22) => mergeRefinedOrApplied(tp1, tp21) & mergeRefinedOrApplied(tp1, tp22) case _ => fail - tp1 match { + tp1 match case tp1 @ RefinedType(parent1, name1, rinfo1) => - tp2 match { + tp2 match case RefinedType(parent2, `name1`, rinfo2) => tp1.derivedRefinedType( mergeRefinedOrApplied(parent1, parent2), name1, rinfo1 | rinfo2) case _ => fallback - } case tp1 @ AppliedType(tycon1, args1) => - tp2 match { + tp2 match case AppliedType(tycon2, args2) => tp1.derivedAppliedType( mergeRefinedOrApplied(tycon1, tycon2), TypeComparer.lubArgs(args1, args2, tycon1.typeParams)) case _ => fallback - } case tp1 @ TypeRef(pre1, _) => - tp2 match { + tp2 match case tp2 @ TypeRef(pre2, _) if tp1.name eq tp2.name => tp1.derivedSelect(pre1 | pre2) case _ => fallback - } case AndType(tp11, tp12) => mergeRefinedOrApplied(tp11, tp2) & 
mergeRefinedOrApplied(tp12, tp2) case _ => fail - } - } - def approximateOr(tp1: Type, tp2: Type): Type = { - def isClassRef(tp: Type): Boolean = tp match { + def approximateOr(tp1: Type, tp2: Type): Type = + def isClassRef(tp: Type): Boolean = tp match case tp: TypeRef => tp.symbol.isClass case tp: AppliedType => isClassRef(tp.tycon) case tp: RefinedType => isClassRef(tp.parent) case _ => false - } // Step 1: Get RecTypes and ErrorTypes and CapturingTypes out of the way, - tp1 match { + tp1 match case tp1: RecType => return tp1.rebind(approximateOr(tp1.parent, tp2)) case CapturingType(parent1, refs1) => @@ -301,8 +282,7 @@ object TypeOps: case err: ErrorType => return err case _ => - } - tp2 match { + tp2 match case tp2: RecType => return tp2.rebind(approximateOr(tp1, tp2.parent)) case CapturingType(parent2, refs2) => @@ -310,7 +290,6 @@ object TypeOps: case err: ErrorType => return err case _ => - } // Step 2: Try to widen either side. This is tricky and incomplete. // An illustration is in test pos/padTo.scala: Here we need to compute the join of @@ -347,18 +326,16 @@ object TypeOps: // // However, this alone is still not enough. There are other sources of incompleteness, // for instance arising from mis-aligned refinements. 
- val tp1w = tp1 match { + val tp1w = tp1 match case tp1: TypeProxy if !isClassRef(tp1) => tp1.superType.widenExpr case _ => tp1 - } - val tp2w = tp2 match { + val tp2w = tp2 match case tp2: TypeProxy if !isClassRef(tp2) => tp2.superType.widenExpr case _ => tp2 - } - if ((tp1w ne tp1) || (tp2w ne tp2)) { + if ((tp1w ne tp1) || (tp2w ne tp2)) val isSingle1 = tp1.isInstanceOf[SingletonType] val isSingle2 = tp2.isInstanceOf[SingletonType] - return { + return if (tp2w eq tp2) orDominator(tp1w | tp2) // 2.1 else if (tp1w eq tp1) orDominator(tp1 | tp2w) // 2.1 else if (tp1w frozen_<:< tp2w) orDominator(tp1w | tp2) // 2.2 @@ -368,8 +345,6 @@ object TypeOps: else if (tp1 frozen_<:< tp2w) tp2w // 2.4 else if (tp2 frozen_<:< tp1w) tp1w // 2.5 else orDominator(tp1w | tp2) // 2.6 - } - } def isAccessible(cls: ClassSymbol) = if cls.isOneOf(AccessFlags) || cls.privateWithin.exists then @@ -383,9 +358,8 @@ object TypeOps: def baseTp(cls: ClassSymbol): Type = tp.baseType(cls).mapReduceOr(identity)(mergeRefinedOrApplied) doms.map(baseTp).reduceLeft(AndType.apply) - } - tp match { + tp match case tp: OrType => (tp.tp1.dealias, tp.tp2.dealias) match case (tp1 @ AppliedType(tycon1, args1), tp2 @ AppliedType(tycon2, args2)) @@ -395,8 +369,6 @@ object TypeOps: approximateOr(tp1, tp2) case _ => tp - } - } /** An abstraction of a class info, consisting of * - the intersection of its parents, @@ -404,14 +376,14 @@ object TypeOps: * - abstracted over all type parameters (into a type lambda) * - where all references to `this` of the class are closed over in a RecType. 
*/ - def classBound(info: ClassInfo)(using Context): Type = { + def classBound(info: ClassInfo)(using Context): Type = val cls = info.cls val parentType = info.parents.reduceLeft(TypeComparer.andType(_, _)) def isRefinable(sym: Symbol) = !sym.is(Private) && !sym.isConstructor && !sym.isClass val (refinableDecls, missingDecls) = info.decls.toList.partition(isRefinable) - def addRefinement(parent: Type, decl: Symbol) = { + def addRefinement(parent: Type, decl: Symbol) = val inherited = parentType.findMember(decl.name, cls.thisType, required = EmptyFlags, excluded = Private @@ -430,18 +402,15 @@ object TypeOps: if needsRefinement then RefinedType(parent, decl.name, avoid(decl.info, missingDecls)) else parent - } def close(tp: Type) = RecType.closeOver { rt => tp.subst(cls :: Nil, rt.recThis :: Nil).substThis(cls, rt.recThis) } val raw = refinableDecls.foldLeft(parentType)(addRefinement) - HKTypeLambda.fromParams(cls.typeParams, raw) match { + HKTypeLambda.fromParams(cls.typeParams, raw) match case tl: HKTypeLambda => tl.derivedLambdaType(resType = close(tl.resType)) case tp => close(tp) - } - } /** An approximating map that drops NamedTypes matching `toAvoid` and wildcard types. */ abstract class AvoidMap(using Context) extends AvoidWildcardsMap, IdempotentCaptRefMap: @@ -460,12 +429,11 @@ object TypeOps: try tp match case tp: TermRef if toAvoid(tp) => - tp.info.widenExpr.dealias match { + tp.info.widenExpr.dealias match case info: SingletonType => apply(info) case info => range(defn.NothingType, apply(info)) - } case tp: TypeRef if toAvoid(tp) => - tp.info match { + tp.info match case info: AliasingBounds => apply(info.alias) case TypeBounds(lo, hi) => @@ -474,7 +442,6 @@ object TypeOps: range(defn.NothingType, apply(classBound(info))) case _ => emptyRange // should happen only in error cases - } case tp: ThisType => // ThisType is only used inside a class. 
// Therefore, either they don't appear in the type to be avoided, or @@ -505,14 +472,13 @@ object TypeOps: override def derivedSelect(tp: NamedType, pre: Type) = if (pre eq tp.prefix) tp - else (if pre.isSingleton then NoType else tryWiden(tp, tp.prefix)).orElse { + else (if pre.isSingleton then NoType else tryWiden(tp, tp.prefix)).orElse: if (tp.isTerm && variance > 0 && !pre.isSingleton) apply(tp.info.widenExpr) else if (upper(pre).member(tp.name).exists) super.derivedSelect(tp, pre) else range(defn.NothingType, defn.AnyType) - } end AvoidMap /** An upper approximation of the given type `tp` that does not refer to any symbol in `symsToAvoid` @@ -533,8 +499,8 @@ object TypeOps: * does not update `ctx.nestingLevel` when entering a block so I'm leaving * this as Future Work™. */ - def avoid(tp: Type, symsToAvoid: => List[Symbol])(using Context): Type = { - val widenMap = new AvoidMap { + def avoid(tp: Type, symsToAvoid: => List[Symbol])(using Context): Type = + val widenMap = new AvoidMap: @threadUnsafe lazy val forbidden = symsToAvoid.toSet def toAvoid(tp: NamedType) = val sym = tp.symbol @@ -547,10 +513,9 @@ object TypeOps: * the set as it is. See comment by @linyxus on #16261. 
*/ override def needsRangeIfInvariant(refs: CaptureSet): Boolean = - refs.elems.exists { + refs.elems.exists: case ref: TermRef => toAvoid(ref) case _ => false - } override def apply(tp: Type): Type = tp match case tp: TypeVar if mapCtx.typerState.constraint.contains(tp) => @@ -563,18 +528,16 @@ object TypeOps: case _ => super.apply(tp) end apply - } widenMap(tp) - } /** If `tpe` is of the form `p.x` where `p` refers to a package * but `x` is not owned by a package, expand it to * * p.package.x */ - def makePackageObjPrefixExplicit(tpe: NamedType)(using Context): Type = { - def tryInsert(pkgClass: SymDenotation): Type = pkgClass match { + def makePackageObjPrefixExplicit(tpe: NamedType)(using Context): Type = + def tryInsert(pkgClass: SymDenotation): Type = pkgClass match case pkg: PackageClassDenotation => var sym = tpe.symbol if !sym.exists && tpe.denot.isOverloaded then @@ -587,16 +550,13 @@ object TypeOps: else tpe case _ => tpe - } if (tpe.symbol.isRoot) tpe else - tpe.prefix match { + tpe.prefix match case pre: ThisType if pre.cls.is(Package) => tryInsert(pre.cls) case pre: TermRef if pre.symbol.is(Package) => tryInsert(pre.symbol.moduleClass) case _ => tpe - } - } /** An argument bounds violation is a triple consisting of * - the argument tree @@ -622,51 +582,45 @@ object TypeOps: boundss: List[TypeBounds], instantiate: (Type, List[Type]) => Type, app: Type)( - using Context): List[BoundsViolation] = withMode(Mode.CheckBoundsOrSelfType) { + using Context): List[BoundsViolation] = withMode(Mode.CheckBoundsOrSelfType): val argTypes = args.tpes /** Replace all wildcards in `tps` with `#` where `` is the * type parameter corresponding to the wildcard. 
*/ - def skolemizeWildcardArgs(tps: List[Type], app: Type) = app match { + def skolemizeWildcardArgs(tps: List[Type], app: Type) = app match case AppliedType(tycon: TypeRef, args) if tycon.typeSymbol.isClass && !Feature.migrateTo3 => - tps.zipWithConserve(tycon.typeSymbol.typeParams) { - (tp, tparam) => tp match { + tps.zipWithConserve(tycon.typeSymbol.typeParams): + (tp, tparam) => tp match case _: TypeBounds => app.select(tparam) case _ => tp - } - } case _ => tps - } // Skolemized argument types are used to substitute in F-bounds. val skolemizedArgTypes = skolemizeWildcardArgs(argTypes, app) val violations = new mutable.ListBuffer[BoundsViolation] - def checkOverlapsBounds(lo: Type, hi: Type, arg: Tree, bounds: TypeBounds): Unit = { + def checkOverlapsBounds(lo: Type, hi: Type, arg: Tree, bounds: TypeBounds): Unit = //println(i" = ${instantiate(bounds.hi, argTypes)}") var checkCtx = ctx // the context to be used for bounds checking - if (argTypes ne skolemizedArgTypes) { // some of the arguments are wildcards + if (argTypes ne skolemizedArgTypes) // some of the arguments are wildcards /** Is there a `LazyRef(TypeRef(_, sym))` reference in `tp`? */ - def isLazyIn(sym: Symbol, tp: Type): Boolean = { - def isReference(tp: Type) = tp match { + def isLazyIn(sym: Symbol, tp: Type): Boolean = + def isReference(tp: Type) = tp match case tp: LazyRef => tp.ref.isInstanceOf[TypeRef] && tp.ref.typeSymbol == sym case _ => false - } tp.existsPart(isReference, forceLazy = false) - } /** The argument types of the form `TypeRef(_, sym)` which appear as a LazyRef in `bounds`. * This indicates that the application is used as an F-bound for the symbol referred to in the LazyRef. 
*/ - val lazyRefs = skolemizedArgTypes collect { + val lazyRefs = skolemizedArgTypes collect: case tp: TypeRef if isLazyIn(tp.symbol, bounds) => tp.symbol - } - for (sym <- lazyRefs) { + for (sym <- lazyRefs) // If symbol `S` has an F-bound such as `C[?, S]` that contains wildcards, // add a modifieed bound where wildcards are skolemized as a GADT bound for `S`. @@ -676,49 +630,41 @@ object TypeOps: // This mirrors what we do for the bounds that are checked and allows us thus // to bounds-check F-bounds with wildcards. A test case is pos/i6146.scala. - def massage(tp: Type): Type = tp match { + def massage(tp: Type): Type = tp match case tp @ AppliedType(tycon, args) => tp.derivedAppliedType(tycon, skolemizeWildcardArgs(args, tp)) case tp: AndOrType => tp.derivedAndOrType(massage(tp.tp1), massage(tp.tp2)) case _ => tp - } - def narrowBound(bound: Type, fromBelow: Boolean): Unit = { + def narrowBound(bound: Type, fromBelow: Boolean): Unit = val bound1 = massage(bound) - if (bound1 ne bound) { + if (bound1 ne bound) if (checkCtx eq ctx) checkCtx = ctx.fresh.setFreshGADTBounds if (!checkCtx.gadt.contains(sym)) checkCtx.gadtState.addToConstraint(sym) checkCtx.gadtState.addBound(sym, bound1, fromBelow) typr.println("install GADT bound $bound1 for when checking F-bounded $sym") - } - } narrowBound(sym.info.loBound, fromBelow = true) narrowBound(sym.info.hiBound, fromBelow = false) - } - } val hiBound = instantiate(bounds.hi, skolemizedArgTypes) val loBound = instantiate(bounds.lo, skolemizedArgTypes) - def check(using Context) = { + def check(using Context) = if (!(lo <:< hiBound)) violations += ((arg, "upper", hiBound)) if (!(loBound <:< hi)) violations += ((arg, "lower", loBound)) - } check(using checkCtx) - } def loop(args: List[Tree], boundss: List[TypeBounds]): Unit = args match case arg :: args1 => boundss match - case bounds :: boundss1 => - arg.tpe match - case TypeBounds(lo, hi) => checkOverlapsBounds(lo, hi, arg, bounds) - case tp => checkOverlapsBounds(tp, tp, 
arg, bounds) - loop(args1, boundss1) - case _ => + case bounds :: boundss1 => + arg.tpe match + case TypeBounds(lo, hi) => checkOverlapsBounds(lo, hi, arg, bounds) + case tp => checkOverlapsBounds(tp, tp, arg, bounds) + loop(args1, boundss1) + case _ => case _ => loop(args, boundss) violations.toList - } /** Refine child based on parent * @@ -739,7 +685,7 @@ object TypeOps: * If the subtyping is true, the instantiated type `p.child[Vs]` is * returned. Otherwise, `NoType` is returned. */ - def refineUsingParent(parent: Type, child: Symbol, mixins: List[Type] = Nil)(using Context): Type = { + def refineUsingParent(parent: Type, child: Symbol, mixins: List[Type] = Nil)(using Context): Type = // is a place holder from Scalac, it is hopeless to instantiate it. // // Quote from scalac (from nsc/symtab/classfile/Pickler.scala): @@ -753,10 +699,8 @@ object TypeOps: val childTp = if (child.isTerm) child.termRef else child.typeRef - inContext(ctx.fresh.setExploreTyperState().setFreshGADTBounds.addMode(Mode.GadtConstraintInference)) { + inContext(ctx.fresh.setExploreTyperState().setFreshGADTBounds.addMode(Mode.GadtConstraintInference)): instantiateToSubType(childTp, parent, mixins).dealias - } - } /** Instantiate type `tp1` to be a subtype of `tp2` * @@ -765,7 +709,7 @@ object TypeOps: * * Otherwise, return NoType. */ - private def instantiateToSubType(tp1: NamedType, tp2: Type, mixins: List[Type])(using Context): Type = trace(i"instantiateToSubType($tp1, $tp2, $mixins)", typr) { + private def instantiateToSubType(tp1: NamedType, tp2: Type, mixins: List[Type])(using Context): Type = trace(i"instantiateToSubType($tp1, $tp2, $mixins)", typr): // In order for a child type S to qualify as a valid subtype of the parent // T, we need to test whether it is possible S <: T. // @@ -776,10 +720,10 @@ object TypeOps: // 2. Replace `A.this.C` with `A#C` (see tests/patmat/i12681.scala) // 3. 
Replace non-reducing MatchType with its bound // - val approximateParent = new TypeMap { + val approximateParent = new TypeMap: val boundTypeParams = util.HashMap[TypeRef, TypeVar]() - def apply(tp: Type): Type = tp.dealias match { + def apply(tp: Type): Type = tp.dealias match case tp: MatchType => val reduced = tp.reduced if reduced.exists then tp // break cycles @@ -823,28 +767,25 @@ object TypeOps: // Note that `HKTypeLambda.resType` may contain TypeParamRef that are // bound in the HKTypeLambda. This is fine, as the TypeComparer will // recurse on the bounds of `TypeParamRef`. - val bounds: TypeBounds = tycon.underlying match { + val bounds: TypeBounds = tycon.underlying match case TypeBounds(tl1: HKTypeLambda, tl2: HKTypeLambda) => TypeBounds(tl1.resType, tl2.resType) case TypeBounds(tl1: HKTypeLambda, tp2) => TypeBounds(tl1.resType, tp2) case TypeBounds(tp1, tl2: HKTypeLambda) => TypeBounds(tp1, tl2.resType) - } newTypeVar(bounds) case tp => mapOver(tp) - } - } /** Gather GADT symbols and `ThisType`s found in `tp2`, ie. the scrutinee. */ object TraverseTp2 extends TypeTraverser: val thisTypes = util.HashSet[ThisType]() val gadtSyms = new mutable.ListBuffer[Symbol] - def traverse(tp: Type) = { + def traverse(tp: Type) = val tpd = tp.dealias if tpd ne tp then traverse(tpd) else tp match @@ -862,7 +803,6 @@ object TypeOps: traverse(sym.thisType) case _ => traverseChildren(tp) - } TraverseTp2.traverse(tp2) val thisTypes = TraverseTp2.thisTypes val gadtSyms = TraverseTp2.gadtSyms.toList @@ -872,9 +812,9 @@ object TypeOps: // 2. replace it with `X.Child` where `X <: p.C`, stripping ThisType in `p` recursively. 
// // See tests/patmat/i3938.scala, tests/pos/i15029.more.scala, tests/pos/i16785.scala - class InferPrefixMap extends TypeMap { + class InferPrefixMap extends TypeMap: var prefixTVar: Type | Null = null - def apply(tp: Type): Type = tp match { + def apply(tp: Type): Type = tp match case tp @ ThisType(tref) if !tref.symbol.isStaticOwner => val symbol = tref.symbol if thisTypes.contains(tp) then @@ -884,17 +824,14 @@ object TypeOps: TermRef(this(tref.prefix), symbol.sourceModule) else if (prefixTVar != null) this(tref) - else { + else prefixTVar = WildcardType // prevent recursive call from assigning it // e.g. tests/pos/i15029.more.scala, create a TypeVar for `Instances`' B, so we can disregard `Ints` val tvars = tref.typeParams.map { tparam => newTypeVar(tparam.paramInfo.bounds, DepParamName.fresh(tparam.paramName)) } val tref2 = this(tref.applyIfParameterized(tvars)) prefixTVar = newTypeVar(TypeBounds.upper(tref2), DepParamName.fresh(tref.name)) prefixTVar.uncheckedNN - } case tp => mapOver(tp) - } - } val inferThisMap = new InferPrefixMap val tvars = tp1.typeParams.map { tparam => newTypeVar(tparam.paramInfo.bounds, DepParamName.fresh(tparam.paramName)) } @@ -912,19 +849,16 @@ object TypeOps: parent.argInfos.nonEmpty && approximateParent(parent) <:< tp2 } - def instantiate(): Type = { + def instantiate(): Type = for tp <- mixins.reverseIterator do protoTp1 <:< tp maximizeType(protoTp1, NoSpan) wildApprox(protoTp1) - } if (protoTp1 <:< tp2) instantiate() - else { + else val approxTp2 = approximateParent(tp2) if (protoTp1 <:< approxTp2 || parentQualify(protoTp1, approxTp2)) instantiate() else NoType - } - } def nestedPairs(ts: List[Type])(using Context): Type = ts.foldRight(defn.EmptyTupleModule.termRef: Type)(defn.PairClass.typeRef.appliedTo(_, _)) diff --git a/compiler/src/dotty/tools/dotc/core/TyperState.scala b/compiler/src/dotty/tools/dotc/core/TyperState.scala index d2df2a2aebef..3feab5d1bd0e 100644 --- a/compiler/src/dotty/tools/dotc/core/TyperState.scala 
+++ b/compiler/src/dotty/tools/dotc/core/TyperState.scala @@ -15,7 +15,7 @@ import Decorators._ import scala.annotation.internal.sharable -object TyperState { +object TyperState: @sharable private var nextId: Int = 0 def initialState() = TyperState() @@ -39,9 +39,8 @@ object TyperState { ts.constraint = constraint ts.ownedVars = ownedVars ts.upLevels = upLevels -} -class TyperState() { +class TyperState(): import TyperState.LevelMap private var myId: Int = _ @@ -59,12 +58,11 @@ class TyperState() { private var myConstraint: Constraint = _ def constraint: Constraint = myConstraint - def constraint_=(c: Constraint)(using Context): Unit = { + def constraint_=(c: Constraint)(using Context): Unit = if (Config.debugCheckConstraintsClosed && isGlobalCommittable) c.checkClosed() myConstraint = c if Config.checkConsistentVars && !ctx.reporter.errorsReported then c.checkConsistentVars() - } private var previousConstraint: Constraint = _ @@ -156,7 +154,7 @@ class TyperState() { * isApplicableSafe but also for (e.g. erased-lubs.scala) as well as * many parts of dotty itself. */ - def commit()(using Context): Unit = { + def commit()(using Context): Unit = Stats.record("typerState.commit") assert(isCommittable, s"$this is not committable") assert(!isCommitted, s"$this is already committed") @@ -192,7 +190,6 @@ class TyperState() { targetState.gc() isCommitted = true ownedVars = SimpleIdentitySet.empty - } /** Ensure that this constraint does not associate different TypeVars for the * same type lambda than the `other` constraint. 
Do this by renaming type lambdas @@ -235,7 +232,7 @@ class TyperState() { }) && // Integrate the additional constraints on type variables from `other` // and merge hardness markers - constraint.uninstVars.forall(tv => + constraint.uninstVars.forall(tv => if other.isHard(tv) then constraint = constraint.withHard(tv) val p = tv.origin val otherLos = other.lower(p) @@ -295,12 +292,10 @@ class TyperState() { for tl <- toCollect do constraint = constraint.remove(tl) - override def toString: String = { + override def toString: String = def ids(state: TyperState): List[String] = s"${state.id}${if (state.isCommittable) "" else "X"}" :: (if (state.previous == null) Nil else ids(state.previous.uncheckedNN)) s"TS[${ids(this).mkString(", ")}]" - } def stateChainStr: String = s"$this${if (previous == null) "" else previous.uncheckedNN.stateChainStr}" -} diff --git a/compiler/src/dotty/tools/dotc/core/Types.scala b/compiler/src/dotty/tools/dotc/core/Types.scala index bbec037ebef1..5aff01ff928e 100644 --- a/compiler/src/dotty/tools/dotc/core/Types.scala +++ b/compiler/src/dotty/tools/dotc/core/Types.scala @@ -45,7 +45,7 @@ import scala.annotation.threadUnsafe import dotty.tools.dotc.transform.SymUtils._ import dotty.tools.dotc.transform.TypeUtils.isErasedClass -object Types { +object Types: @sharable private var nextId = 0 @@ -91,7 +91,7 @@ object Types { * * Note: please keep in sync with copy in `docs/docs/internals/type-system.md`. 
*/ - abstract class Type extends Hashable with printing.Showable { + abstract class Type extends Hashable with printing.Showable: // ----- Tests ----------------------------------------------------- @@ -119,7 +119,7 @@ object Types { if t.mightBeProvisional then t.mightBeProvisional = t match case t: TypeRef => - t.currentSymbol.isProvisional || !t.currentSymbol.isStatic && { + t.currentSymbol.isProvisional || !t.currentSymbol.isStatic `&&`: (t: Type).mightBeProvisional = false // break cycles test(t.prefix, theAcc) || t.denot.infoOrCompleter.match @@ -127,7 +127,6 @@ object Types { case info: AliasingBounds => test(info.alias, theAcc) case TypeBounds(lo, hi) => test(lo, theAcc) || test(hi, theAcc) case _ => false - } case t: TermRef => !t.currentSymbol.isStatic && test(t.prefix, theAcc) case t: AppliedType => @@ -164,7 +163,7 @@ object Types { * Like in isStableMember, "stability" means idempotence. * Rationale: If an expression has a stable type, the expression must be idempotent, so stable types * must be singleton types of stable expressions. */ - final def isStable(using Context): Boolean = stripTypeVar match { + final def isStable(using Context): Boolean = stripTypeVar match case tp: TermRef => tp.symbol.isStableMember && tp.prefix.isStable || tp.info.isStable case _: SingletonType | NoPrefix => true case tp: RefinedOrRecType => tp.parent.isStable @@ -181,7 +180,6 @@ object Types { tp.tp2.isStable && (realizability(tp.tp1) eq Realizable) case tp: AppliedType => tp.cachedIsStable case _ => false - } /** Is this type a (possibly refined, applied, aliased or annotated) type reference * to the given type symbol? @@ -189,12 +187,11 @@ object Types { * It makes no sense for it to be an alias type because isRef would always * return false in that case. 
*/ - def isRef(sym: Symbol, skipRefined: Boolean = true)(using Context): Boolean = this match { + def isRef(sym: Symbol, skipRefined: Boolean = true)(using Context): Boolean = this match case this1: TypeRef => - this1.info match { // see comment in Namer#typeDefSig + this1.info match // see comment in Namer#typeDefSig case TypeAlias(tp) => tp.isRef(sym, skipRefined) case _ => this1.symbol eq sym - } case this1: RefinedOrRecType if skipRefined => this1.parent.isRef(sym, skipRefined) case this1: AppliedType => @@ -206,16 +203,14 @@ object Types { case this1: AnnotatedType => this1.parent.isRef(sym, skipRefined) case _ => false - } /** Is this type a (neither aliased nor applied nor annotated) reference to class `sym`? */ - def isDirectRef(sym: Symbol)(using Context): Boolean = stripTypeVar match { + def isDirectRef(sym: Symbol)(using Context): Boolean = stripTypeVar match case this1: TypeRef => this1.name == sym.name && // avoid forcing info if names differ (this1.symbol eq sym) case _ => false - } def isAny(using Context): Boolean = isRef(defn.AnyClass, skipRefined = false) def isAnyRef(using Context): Boolean = isRef(defn.ObjectClass, skipRefined = false) @@ -226,25 +221,22 @@ object Types { case _ => false /** Is this type exactly Null (no vars, aliases, refinements etc allowed)? */ - def isExactlyNull(using Context): Boolean = this match { + def isExactlyNull(using Context): Boolean = this match case tp: TypeRef => tp.name == tpnme.Null && (tp.symbol eq defn.NullClass) case _ => false - } /** Is this type exactly Nothing (no vars, aliases, refinements etc allowed)? */ - def isExactlyNothing(using Context): Boolean = this match { + def isExactlyNothing(using Context): Boolean = this match case tp: TypeRef => tp.name == tpnme.Nothing && (tp.symbol eq defn.NothingClass) case _ => false - } /** Is this type exactly Any (no vars, aliases, refinements etc allowed)? 
*/ - def isExactlyAny(using Context): Boolean = this match { + def isExactlyAny(using Context): Boolean = this match case tp: TypeRef => tp.name == tpnme.Any && (tp.symbol eq defn.AnyClass) case _ => false - } def isBottomType(using Context): Boolean = if ctx.mode.is(Mode.SafeNulls) && !ctx.phase.erasedTypes then hasClassSymbol(defn.NothingClass) @@ -257,12 +249,12 @@ object Types { /** True if this type is an instance of the given `cls` or an instance of * a non-bottom subclass of `cls`. */ - final def derivesFrom(cls: Symbol)(using Context): Boolean = { + final def derivesFrom(cls: Symbol)(using Context): Boolean = def isLowerBottomType(tp: Type) = tp.isBottomType && (tp.hasClassSymbol(defn.NothingClass) || cls != defn.NothingClass && !cls.isValueClass) - def loop(tp: Type): Boolean = tp match { + def loop(tp: Type): Boolean = tp match case tp: TypeRef => val sym = tp.symbol if (sym.isClass) sym.derivesFrom(cls) else loop(tp.superType) @@ -288,9 +280,7 @@ object Types { cls == defn.ObjectClass case _ => false - } loop(this) - } def isFromJavaObject(using Context): Boolean = isRef(defn.ObjectClass) && (typeSymbol eq defn.FromJavaObjectSymbol) @@ -318,8 +308,8 @@ object Types { def isSingleton(using Context): Boolean = dealias.isInstanceOf[SingletonType] /** Is this type of kind `AnyKind`? */ - def hasAnyKind(using Context): Boolean = { - @tailrec def loop(tp: Type): Boolean = tp match { + def hasAnyKind(using Context): Boolean = + @tailrec def loop(tp: Type): Boolean = tp match case tp: TypeRef => val sym = tp.symbol if (sym.isClass) sym == defn.AnyKindClass else loop(tp.translucentSuperType) @@ -327,12 +317,10 @@ object Types { loop(tp.underlying) // underlying OK here since an AnyKinded type cannot be a type argument of another type case _ => false - } loop(this) - } /** Is this type guaranteed not to have `null` as a value? 
*/ - final def isNotNull(using Context): Boolean = this match { + final def isNotNull(using Context): Boolean = this match case tp: ConstantType => tp.value.value != null case tp: ClassInfo => !tp.cls.isNullableClass && tp.cls != defn.NothingClass case tp: AppliedType => tp.superType.isNotNull @@ -341,7 +329,6 @@ object Types { case AndType(tp1, tp2) => tp1.isNotNull || tp2.isNotNull case OrType(tp1, tp2) => tp1.isNotNull && tp2.isNotNull case _ => false - } /** Is this type produced as a repair for an error? */ final def isError(using Context): Boolean = stripTypeVar.isInstanceOf[ErrorType] @@ -371,19 +358,17 @@ object Types { case _ => false /** Does the type carry an annotation that is an instance of `cls`? */ - @tailrec final def hasAnnotation(cls: ClassSymbol)(using Context): Boolean = stripTypeVar match { + @tailrec final def hasAnnotation(cls: ClassSymbol)(using Context): Boolean = stripTypeVar match case AnnotatedType(tp, annot) => (annot matches cls) || (tp hasAnnotation cls) case _ => false - } /** Does this type have a supertype with an annotation satisfying given predicate `p`? */ - def derivesAnnotWith(p: Annotation => Boolean)(using Context): Boolean = this match { + def derivesAnnotWith(p: Annotation => Boolean)(using Context): Boolean = this match case tp: AnnotatedType => p(tp.annot) || tp.parent.derivesAnnotWith(p) case tp: TypeProxy => tp.superType.derivesAnnotWith(p) case AndType(l, r) => l.derivesAnnotWith(p) || r.derivesAnnotWith(p) case OrType(l, r) => l.derivesAnnotWith(p) && r.derivesAnnotWith(p) case _ => false - } /** Does this type occur as a part of type `that`? */ def occursIn(that: Type)(using Context): Boolean = @@ -404,17 +389,15 @@ object Types { /** Is this the type of a method that has a repeated parameter type as * last parameter type? 
*/ - def isVarArgsMethod(using Context): Boolean = stripPoly match { + def isVarArgsMethod(using Context): Boolean = stripPoly match case mt: MethodType => mt.paramInfos.nonEmpty && mt.paramInfos.last.isRepeatedParam case _ => false - } /** Is this the type of a method with a leading empty parameter list? */ - def isNullaryMethod(using Context): Boolean = stripPoly match { + def isNullaryMethod(using Context): Boolean = stripPoly match case MethodType(Nil) => true case _ => false - } /** Is this an alias TypeBounds? */ final def isTypeAlias: Boolean = this.isInstanceOf[TypeAlias] @@ -432,12 +415,11 @@ object Types { */ def isMatch(using Context): Boolean = underlyingMatchType.exists - def underlyingMatchType(using Context): Type = stripped match { + def underlyingMatchType(using Context): Type = stripped match case tp: MatchType => tp case tp: HKTypeLambda => tp.resType.underlyingMatchType case tp: AppliedType if tp.isMatchAlias => tp.superType.underlyingMatchType case _ => NoType - } /** Is this a higher-kinded type lambda with given parameter variances? 
* These lambdas are used as the RHS of higher-kinded abstract types or @@ -492,27 +474,24 @@ object Types { new NamedPartsAccumulator(p).apply(Nil, this) /** Map function `f` over elements of an AndType, rebuilding with function `g` */ - def mapReduceAnd[T](f: Type => T)(g: (T, T) => T)(using Context): T = stripTypeVar match { + def mapReduceAnd[T](f: Type => T)(g: (T, T) => T)(using Context): T = stripTypeVar match case AndType(tp1, tp2) => g(tp1.mapReduceAnd(f)(g), tp2.mapReduceAnd(f)(g)) case _ => f(this) - } /** Map function `f` over elements of an OrType, rebuilding with function `g` */ - final def mapReduceOr[T](f: Type => T)(g: (T, T) => T)(using Context): T = stripTypeVar match { + final def mapReduceOr[T](f: Type => T)(g: (T, T) => T)(using Context): T = stripTypeVar match case OrType(tp1, tp2) => g(tp1.mapReduceOr(f)(g), tp2.mapReduceOr(f)(g)) case _ => f(this) - } // ----- Associated symbols ---------------------------------------------- /** The type symbol associated with the type */ - @tailrec final def typeSymbol(using Context): Symbol = this match { + @tailrec final def typeSymbol(using Context): Symbol = this match case tp: TypeRef => tp.symbol case tp: TypeProxy => tp.underlying.typeSymbol case tp: ClassInfo => tp.cls case _: JavaArrayType => defn.ArrayClass case _ => NoSymbol - } /** The least class or trait of which this type is a subtype or parameterized * instance, or NoSymbol if none exists (either because this type is not a @@ -552,7 +531,7 @@ object Types { /** The least (wrt <:<) set of symbols satisfying the `include` predicate of which this type is a subtype */ - final def parentSymbols(include: Symbol => Boolean)(using Context): List[Symbol] = this match { + final def parentSymbols(include: Symbol => Boolean)(using Context): List[Symbol] = this match case tp: TypeRef => val sym = tp.symbol if (include(sym)) sym :: Nil else tp.superType.parentSymbols(include) @@ -566,7 +545,6 @@ object Types { l.parentSymbols(include) intersect 
r.parentSymbols(include) // TODO does not conform to spec case _ => Nil - } /** The least (wrt <:<) set of class symbols of which this type is a subtype */ @@ -606,11 +584,10 @@ object Types { case _ => false /** The term symbol associated with the type */ - @tailrec final def termSymbol(using Context): Symbol = this match { + @tailrec final def termSymbol(using Context): Symbol = this match case tp: TermRef => tp.symbol case tp: TypeProxy => tp.underlying.termSymbol case _ => NoSymbol - } /** The base classes of this type as determined by ClassDenotation * in linearization order, with the class itself as first element. @@ -635,23 +612,21 @@ object Types { * Defined by ClassInfo, inherited by type proxies. * Empty scope for all other types. */ - @tailrec final def decls(using Context): Scope = this match { + @tailrec final def decls(using Context): Scope = this match case tp: ClassInfo => tp.decls case tp: TypeProxy => tp.superType.decls case _ => EmptyScope - } /** A denotation containing the declaration(s) in this type with the given name. * The result is either a SymDenotation or a MultiDenotation of SymDenotations. * The info(s) are the original symbol infos, no translation takes place. */ - final def decl(name: Name)(using Context): Denotation = { + final def decl(name: Name)(using Context): Denotation = record("decl") findDecl(name, EmptyFlags) - } /** A denotation containing the non-private declaration(s) in this type with the given name */ final def nonPrivateDecl(name: Name)(using Context): Denotation = @@ -661,7 +636,7 @@ object Types { * name, as seen from prefix type `pre`. Declarations that have a flag * in `excluded` are omitted. 
*/ - @tailrec final def findDecl(name: Name, excluded: FlagSet)(using Context): Denotation = this match { + @tailrec final def findDecl(name: Name, excluded: FlagSet)(using Context): Denotation = this match case tp: ClassInfo => tp.decls.denotsNamed(name).filterWithFlags(EmptyFlags, excluded).toDenot(NoPrefix) case tp: TypeProxy => @@ -670,36 +645,31 @@ object Types { newErrorSymbol(classSymbol orElse defn.RootClass, name, err.msg) case _ => NoDenotation - } /** The member of this type with the given name */ - final def member(name: Name)(using Context): Denotation = { + final def member(name: Name)(using Context): Denotation = record("member") memberBasedOnFlags(name, required = EmptyFlags, excluded = EmptyFlags) - } /** The non-private member of this type with the given name. */ - final def nonPrivateMember(name: Name)(using Context): Denotation = { + final def nonPrivateMember(name: Name)(using Context): Denotation = record("nonPrivateMember") memberBasedOnFlags(name, required = EmptyFlags, excluded = Flags.Private) - } /** The member with given `name` and required and/or excluded flags */ - final def memberBasedOnFlags(name: Name, required: FlagSet = EmptyFlags, excluded: FlagSet = EmptyFlags)(using Context): Denotation = { + final def memberBasedOnFlags(name: Name, required: FlagSet = EmptyFlags, excluded: FlagSet = EmptyFlags)(using Context): Denotation = // We need a valid prefix for `asSeenFrom` - val pre = this match { + val pre = this match case tp: ClassInfo => tp.appliedRef case _ => widenIfUnstable - } findMember(name, pre, required, excluded) - } /** Find member of this type with given `name`, all `required` * flags and no `excluded` flag and produce a denotation that contains * the type of the member as seen from given prefix `pre`. 
*/ - final def findMember(name: Name, pre: Type, required: FlagSet = EmptyFlags, excluded: FlagSet = EmptyFlags)(using Context): Denotation = { - @tailrec def go(tp: Type): Denotation = tp match { + final def findMember(name: Name, pre: Type, required: FlagSet = EmptyFlags, excluded: FlagSet = EmptyFlags)(using Context): Denotation = + @tailrec def go(tp: Type): Denotation = tp match case tp: TermRef => go (tp.underlying match { case mt: MethodType @@ -707,23 +677,20 @@ object Types { case tp1 => tp1 }) case tp: TypeRef => - tp.denot match { + tp.denot match case d: ClassDenotation => d.findMember(name, pre, required, excluded) case d => go(d.info) - } case tp: AppliedType => - tp.tycon match { + tp.tycon match case tc: TypeRef => if (tc.symbol.isClass) go(tc) - else { + else val normed = tp.tryNormalize go(if (normed.exists) normed else tp.superType) - } case tc: HKTypeLambda => goApplied(tp, tc) case _ => go(tp.superType) - } case tp: ThisType => // ??? inline goThis(tp) case tp: RefinedType => @@ -751,7 +718,6 @@ object Types { newErrorSymbol(pre.classSymbol orElse defn.RootClass, name, err.msg) case _ => NoDenotation - } def goRec(tp: RecType) = // TODO: change tp.parent to nullable or other values if ((tp.parent: Type | Null) == null) NoDenotation @@ -775,11 +741,11 @@ object Types { finally rt.openedWithPrefix = NoType end goRec - def goRefined(tp: RefinedType) = { + def goRefined(tp: RefinedType) = val pdenot = go(tp.parent) val pinfo = pdenot.info val rinfo = tp.refinedInfo - if (name.isTypeName && !pinfo.isInstanceOf[ClassInfo]) { // simplified case that runs more efficiently + if (name.isTypeName && !pinfo.isInstanceOf[ClassInfo]) // simplified case that runs more efficiently val jointInfo = if rinfo.isInstanceOf[TypeAlias] && !ctx.mode.is(Mode.CheckBoundsOrSelfType) then // In normal situations, the only way to "improve" on rinfo is to return an empty type bounds @@ -792,7 +758,6 @@ object Types { else pinfo recoverable_& rinfo 
pdenot.asSingleDenotation.derivedSingleDenotation(pdenot.symbol, jointInfo) - } else val isRefinedMethod = rinfo.isInstanceOf[MethodOrPoly] val joint = pdenot.meet( @@ -806,7 +771,6 @@ object Types { joint.derivedSingleDenotation(joint.symbol, rinfo, pre, isRefinedMethod) case _ => joint - } def goApplied(tp: AppliedType, tycon: HKTypeLambda) = go(tycon.resType).mapInfo(info => @@ -840,22 +804,19 @@ object Types { // loadClassWithPrivateInnerAndSubSelf in ShowClassTests go(tp.cls.typeRef) orElse d - def goParam(tp: TypeParamRef) = { + def goParam(tp: TypeParamRef) = val next = tp.underlying - ctx.typerState.constraint.entry(tp) match { + ctx.typerState.constraint.entry(tp) match case bounds: TypeBounds if bounds ne next => go(bounds.hi) case _ => go(next) - } - } - def goSuper(tp: SuperType) = go(tp.underlying) match { + def goSuper(tp: SuperType) = go(tp.underlying) match case d: JointRefDenotation => typr.println(i"redirecting super.$name from $tp to ${d.symbol.showLocated}") new UniqueRefDenotation(d.symbol, tp.memberInfo(d.symbol), d.validFor, pre) case d => d - } def goAnd(l: Type, r: Type) = go(l).meet(go(r), pre, safeIntersection = ctx.base.pendingMemberSearches.contains(name)) @@ -883,25 +844,21 @@ object Types { ctx.base.pendingMemberSearches = name :: ctx.base.pendingMemberSearches ctx.base.findMemberCount = recCount + 1 try go(this) - catch { + catch case ex: Throwable => core.println(s"findMember exception for $this member $name, pre = $pre, recCount = $recCount") - def showPrefixSafely(pre: Type)(using Context): String = pre.stripTypeVar match { + def showPrefixSafely(pre: Type)(using Context): String = pre.stripTypeVar match case pre: TermRef => i"${pre.symbol.name}." case pre: TypeRef => i"${pre.symbol.name}#" case pre: TypeProxy => showPrefixSafely(pre.superType) case _ => if (pre.typeSymbol.exists) i"${pre.typeSymbol.name}#" else "." 
- } handleRecursive("find-member", i"${showPrefixSafely(pre)}$name", ex) - } - finally { + finally if (recCount >= Config.LogPendingFindMemberThreshold) ctx.base.pendingMemberSearches = ctx.base.pendingMemberSearches.tail ctx.base.findMemberCount = recCount - } - } /** The set of names of members of this type that pass the given name filter * when seen as members of `pre`. More precisely, these are all @@ -909,7 +866,7 @@ object Types { * @note: OK to use a Set[Name] here because Name hashcodes are replayable, * hence the Set will always give the same names in the same order. */ - final def memberNames(keepOnly: NameFilter, pre: Type = this)(using Context): Set[Name] = this match { + final def memberNames(keepOnly: NameFilter, pre: Type = this)(using Context): Set[Name] = this match case tp: ClassInfo => val names = tp.cls.classDenot.memberNames(keepOnly) if keepOnly.isStable then names else names.filter(keepOnly(pre, _)) @@ -924,20 +881,17 @@ object Types { tp.tp1.memberNames(keepOnly, pre) & tp.tp2.memberNames(keepOnly, pre) case _ => Set() - } - def memberDenots(keepOnly: NameFilter, f: (Name, mutable.Buffer[SingleDenotation]) => Unit)(using Context): Seq[SingleDenotation] = { + def memberDenots(keepOnly: NameFilter, f: (Name, mutable.Buffer[SingleDenotation]) => Unit)(using Context): Seq[SingleDenotation] = val buf = mutable.ListBuffer[SingleDenotation]() for (name <- memberNames(keepOnly)) f(name, buf) buf.toList - } /** The set of abstract term members of this type. 
*/ - final def abstractTermMembers(using Context): Seq[SingleDenotation] = { + final def abstractTermMembers(using Context): Seq[SingleDenotation] = record("abstractTermMembers") memberDenots(abstractTermNameFilter, (name, buf) => buf ++= nonPrivateMember(name).altsWith(_.is(Deferred))) - } /** * Returns the set of methods that are abstract and do not overlap with any of @@ -953,7 +907,7 @@ object Types { * @return the set of methods that are abstract and do not match any of [[java.lang.Object]] * */ - final def possibleSamMethods(using Context): Seq[SingleDenotation] = { + final def possibleSamMethods(using Context): Seq[SingleDenotation] = record("possibleSamMethods") atPhaseNoLater(erasurePhase) { abstractTermMembers.toList.filterConserve { m => @@ -962,69 +916,59 @@ object Types { && !m.symbol.isInlineMethod } }.map(_.current) - } /** The set of abstract type members of this type. */ - final def abstractTypeMembers(using Context): Seq[SingleDenotation] = { + final def abstractTypeMembers(using Context): Seq[SingleDenotation] = record("abstractTypeMembers") memberDenots(abstractTypeNameFilter, (name, buf) => buf += nonPrivateMember(name).asSingleDenotation) - } /** The set of abstract type members of this type. 
*/ - final def nonClassTypeMembers(using Context): Seq[SingleDenotation] = { + final def nonClassTypeMembers(using Context): Seq[SingleDenotation] = record("nonClassTypeMembers") memberDenots(nonClassTypeNameFilter, (name, buf) => buf += member(name).asSingleDenotation) - } /** The set of type alias members of this type */ - final def typeAliasMembers(using Context): Seq[SingleDenotation] = { + final def typeAliasMembers(using Context): Seq[SingleDenotation] = record("typeAliasMembers") memberDenots(typeAliasNameFilter, (name, buf) => buf += member(name).asSingleDenotation) - } /** The set of type members of this type */ - final def typeMembers(using Context): Seq[SingleDenotation] = { + final def typeMembers(using Context): Seq[SingleDenotation] = record("typeMembers") memberDenots(typeNameFilter, (name, buf) => buf += member(name).asSingleDenotation) - } /** The set of implicit term members of this type */ - final def implicitMembers(using Context): List[TermRef] = { + final def implicitMembers(using Context): List[TermRef] = record("implicitMembers") memberDenots(implicitFilter, (name, buf) => buf ++= member(name).altsWith(_.isOneOf(GivenOrImplicitVal))) .toList.map(d => TermRef(this, d.symbol.asTerm)) - } /** The set of member classes of this type */ - final def memberClasses(using Context): Seq[SingleDenotation] = { + final def memberClasses(using Context): Seq[SingleDenotation] = record("memberClasses") memberDenots(typeNameFilter, (name, buf) => buf ++= member(name).altsWith(x => x.isClass)) - } - final def fields(using Context): Seq[SingleDenotation] = { + final def fields(using Context): Seq[SingleDenotation] = record("fields") memberDenots(fieldFilter, (name, buf) => buf ++= member(name).altsWith(x => !x.is(Method))) - } /** The set of members of this type that have all of `required` flags but none of `excluded` flags set. 
*/ - final def membersBasedOnFlags(required: FlagSet, excluded: FlagSet)(using Context): Seq[SingleDenotation] = { + final def membersBasedOnFlags(required: FlagSet, excluded: FlagSet)(using Context): Seq[SingleDenotation] = record("membersBasedOnFlags") memberDenots(takeAllFilter, (name, buf) => buf ++= memberBasedOnFlags(name, required, excluded).alternatives) - } /** All members of this type. Warning: this can be expensive to compute! */ - final def allMembers(using Context): Seq[SingleDenotation] = { + final def allMembers(using Context): Seq[SingleDenotation] = record("allMembers") memberDenots(takeAllFilter, (name, buf) => buf ++= member(name).alternatives) - } /** The info of `sym`, seen as a member of this type. */ final def memberInfo(sym: Symbol)(using Context): Type = @@ -1033,33 +977,29 @@ object Types { /** This type seen as if it were the type of a member of prefix type `pre` * declared in class `cls`. */ - final def asSeenFrom(pre: Type, cls: Symbol)(using Context): Type = { + final def asSeenFrom(pre: Type, cls: Symbol)(using Context): Type = record("asSeenFrom") if (!cls.membersNeedAsSeenFrom(pre)) this else TypeOps.asSeenFrom(this, pre, cls) - } // ----- Subtype-related -------------------------------------------- /** Is this type a subtype of that type? */ - final def <:<(that: Type)(using Context): Boolean = { + final def <:<(that: Type)(using Context): Boolean = record("<:<") TypeComparer.topLevelSubType(this, that) - } /** Is this type a subtype of that type? */ - final def frozen_<:<(that: Type)(using Context): Boolean = { + final def frozen_<:<(that: Type)(using Context): Boolean = record("frozen_<:<") TypeComparer.isSubTypeWhenFrozen(this, that) - } /** Is this type the same as that type? * This is the case iff `this <:< that` and `that <:< this`. 
*/ - final def =:=(that: Type)(using Context): Boolean = { + final def =:=(that: Type)(using Context): Boolean = record("=:=") TypeComparer.isSameType(this, that) - } final def frozen_=:=(that: Type)(using Context): Boolean = TypeComparer.isSameTypeWhenFrozen(this, that) @@ -1087,18 +1027,15 @@ object Types { * @param isSubType a function used for checking subtype relationships. */ final def overrides(that: Type, relaxedCheck: Boolean, matchLoosely: => Boolean, checkClassInfo: Boolean = true, - isSubType: (Type, Type) => Context ?=> Boolean = (tp1, tp2) => tp1 frozen_<:< tp2)(using Context): Boolean = { + isSubType: (Type, Type) => Context ?=> Boolean = (tp1, tp2) => tp1 frozen_<:< tp2)(using Context): Boolean = val overrideCtx = if relaxedCheck then ctx.relaxedOverrideContext else ctx - inContext(overrideCtx) { + inContext(overrideCtx): !checkClassInfo && this.isInstanceOf[ClassInfo] || isSubType(this.widenExpr, that.widenExpr) - || matchLoosely && { + || matchLoosely `&&`: val this1 = this.widenNullaryMethod val that1 = that.widenNullaryMethod ((this1 `ne` this) || (that1 `ne` that)) && this1.overrides(that1, relaxedCheck, false, checkClassInfo) - } - } - } /** Is this type close enough to that type so that members * with the two types would override each other? @@ -1120,35 +1057,30 @@ object Types { * check the types. This is because we are using a relaxed rule (ignoring `Null` types) * to check overriding Java methods. */ - def matches(that: Type)(using Context): Boolean = { + def matches(that: Type)(using Context): Boolean = record("matches") val overrideCtx = if ctx.explicitNulls then ctx.relaxedOverrideContext else ctx TypeComparer.matchesType(this, that, relaxed = !ctx.phase.erasedTypes)(using overrideCtx) - } /** This is the same as `matches` except that it also matches => T with T and * vice versa. 
*/ def matchesLoosely(that: Type)(using Context): Boolean = - (this matches that) || { + (this matches that) `||`: val thisResult = this.widenExpr val thatResult = that.widenExpr (this eq thisResult) != (that eq thatResult) && (thisResult matchesLoosely thatResult) - } /** The basetype of this type with given class symbol, NoType if `base` is not a class. */ - final def baseType(base: Symbol)(using Context): Type = { + final def baseType(base: Symbol)(using Context): Type = record("baseType") - base.denot match { + base.denot match case classd: ClassDenotation => classd.baseTypeOf(this) case _ => NoType - } - } - def & (that: Type)(using Context): Type = { + def & (that: Type)(using Context): Type = record("&") TypeComparer.glb(this, that) - } /** Safer version of `&`. * @@ -1159,30 +1091,27 @@ object Types { * (which will be masked unless `-Yno-deep-subtypes` is enabled). * pos/i536 demonstrates that the infinite loop can also involve lower bounds. */ - def safe_& (that: Type)(using Context): Type = (this, that) match { + def safe_& (that: Type)(using Context): Type = (this, that) match case (TypeBounds(lo1, hi1), TypeBounds(lo2, hi2)) => TypeBounds( OrType.makeHk(lo1.stripLazyRef, lo2.stripLazyRef), AndType.makeHk(hi1.stripLazyRef, hi2.stripLazyRef)) case _ => this & that - } /** `this & that`, but handle CyclicReferences by falling back to `safe_&`. */ def recoverable_&(that: Type)(using Context): Type = try this & that - catch { + catch case ex: CyclicReference => this safe_& that // A test case where this happens is tests/pos/i536.scala. // The & causes a subtype check which calls baseTypeRef again with the same // superclass. 
- } - def | (that: Type)(using Context): Type = { + def | (that: Type)(using Context): Type = record("|") TypeComparer.lub(this, that) - } // ----- Unwrapping types ----------------------------------------------- @@ -1203,16 +1132,14 @@ object Types { val tp1 = stripped.dealias if tp1 ne this then tp1.strippedDealias else this - def rewrapAnnots(tp: Type)(using Context): Type = tp.stripTypeVar match { + def rewrapAnnots(tp: Type)(using Context): Type = tp.stripTypeVar match case AnnotatedType(tp1, annot) => AnnotatedType(rewrapAnnots(tp1), annot) case _ => this - } /** Strip PolyType prefixes */ - def stripPoly(using Context): Type = this match { + def stripPoly(using Context): Type = this match case tp: PolyType => tp.resType.stripPoly case _ => this - } /** Strip LazyRef wrappers */ def stripLazyRef(using Context): Type = this match @@ -1244,39 +1171,34 @@ object Types { /** Widen from singleton type to its underlying non-singleton * base type by applying one or more `underlying` dereferences. */ - final def widenSingleton(using Context): Type = stripped match { + final def widenSingleton(using Context): Type = stripped match case tp: SingletonType if !tp.isOverloaded => tp.underlying.widenSingleton case _ => this - } /** Widen from TermRef to its underlying non-termref * base type, while also skipping Expr types. */ - final def widenTermRefExpr(using Context): Type = stripTypeVar match { + final def widenTermRefExpr(using Context): Type = stripTypeVar match case tp: TermRef if !tp.isOverloaded => tp.underlying.widenExpr.widenTermRefExpr case _ => this - } /** Widen from ExprType type to its result type. * (Note: no stripTypeVar needed because TypeVar's can't refer to ExprTypes.) */ - final def widenExpr: Type = this match { + final def widenExpr: Type = this match case tp: ExprType => tp.resType case _ => this - } /** Widen type if it is unstable (i.e. 
an ExprType, or TermRef to unstable symbol */ - final def widenIfUnstable(using Context): Type = stripTypeVar match { + final def widenIfUnstable(using Context): Type = stripTypeVar match case tp: ExprType => tp.resultType.widenIfUnstable case tp: TermRef if tp.symbol.exists && !tp.symbol.isStableMember => tp.underlying.widenIfUnstable case _ => this - } /** If this is a skolem, its underlying type, otherwise the type itself */ - final def widenSkolem(using Context): Type = this match { + final def widenSkolem(using Context): Type = this match case tp: SkolemType => tp.underlying case _ => this - } /** Widen this type and if the result contains embedded soft union types, replace * them by their joins. @@ -1298,13 +1220,13 @@ object Types { */ def widenUnion(using Context): Type = widen match case tp: OrType => tp match - case OrNull(tp1) => + case OrNull(tp1) => // Don't widen `T|Null`, since otherwise we wouldn't be able to infer nullable unions. - val tp1Widen = tp1.widenUnionWithoutNull - if (tp1Widen.isRef(defn.AnyClass)) tp1Widen - else tp.derivedOrType(tp1Widen, defn.NullType) - case _ => - tp.widenUnionWithoutNull + val tp1Widen = tp1.widenUnionWithoutNull + if (tp1Widen.isRef(defn.AnyClass)) tp1Widen + else tp.derivedOrType(tp1Widen, defn.NullType) + case _ => + tp.widenUnionWithoutNull case tp => tp.widenUnionWithoutNull @@ -1325,7 +1247,7 @@ object Types { * and going to the operands of & and |. * Overridden and cached in OrType. 
*/ - def widenSingletons(using Context): Type = dealias match { + def widenSingletons(using Context): Type = dealias match case tp: SingletonType => tp.widen case tp: OrType => @@ -1337,7 +1259,6 @@ object Types { if ((tp.tp1 eq tp1w) && (tp.tp2 eq tp2w)) this else tp1w & tp2w case _ => this - } /** If this is a nullary method type, its result type */ def widenNullaryMethod(using Context): Type = this match @@ -1386,14 +1307,13 @@ object Types { case Atoms.Unknown => Atoms.Unknown case _ => Atoms.Unknown - private def dealias1(keep: AnnotatedType => Context ?=> Boolean, keepOpaques: Boolean)(using Context): Type = this match { + private def dealias1(keep: AnnotatedType => Context ?=> Boolean, keepOpaques: Boolean)(using Context): Type = this match case tp: TypeRef => if (tp.symbol.isClass) tp - else tp.info match { + else tp.info match case TypeAlias(alias) if !(keepOpaques && tp.symbol.is(Opaque)) => alias.dealias1(keep, keepOpaques) case _ => tp - } case app @ AppliedType(tycon, _) => val tycon1 = tycon.dealias1(keep, keepOpaques) if (tycon1 ne tycon) app.superType.dealias1(keep, keepOpaques) @@ -1412,7 +1332,6 @@ object Types { case tp: LazyRef => tp.ref.dealias1(keep, keepOpaques) case _ => this - } /** Follow aliases and dereferences LazyRefs, annotated types and instantiated * TypeVars until type is no longer alias type, annotated type, LazyRef, @@ -1437,22 +1356,19 @@ object Types { /** Approximate this type with a type that does not contain skolem types. 
*/ final def deskolemized(using Context): Type = - val deskolemizer = new ApproximatingTypeMap { + val deskolemizer = new ApproximatingTypeMap: def apply(tp: Type) = /*trace(i"deskolemize($tp) at $variance", show = true)*/ - tp match { + tp match case tp: SkolemType => range(defn.NothingType, atVariance(1)(apply(tp.info))) case _ => mapOver(tp) - } - } deskolemizer(this) /** The result of normalization using `tryNormalize`, or the type itself if * tryNormlize yields NoType */ - final def normalized(using Context): Type = { + final def normalized(using Context): Type = val normed = tryNormalize if (normed.exists) normed else this - } /** If this type can be normalized at the top-level by rewriting match types * of S[n] types, the result after applying all toplevel normalizations, @@ -1460,10 +1376,9 @@ object Types { */ def tryNormalize(using Context): Type = NoType - private def widenDealias1(keep: AnnotatedType => Context ?=> Boolean)(using Context): Type = { + private def widenDealias1(keep: AnnotatedType => Context ?=> Boolean)(using Context): Type = val res = this.widen.dealias1(keep, keepOpaques = false) if (res eq this) res else res.widenDealias1(keep) - } /** Perform successive widenings and dealiasings until none can be applied anymore */ final def widenDealias(using Context): Type = widenDealias1(keepNever) @@ -1477,28 +1392,25 @@ object Types { /** Widen from constant type to its underlying non-constant * base type. */ - final def deconst(using Context): Type = stripTypeVar match { + final def deconst(using Context): Type = stripTypeVar match case tp: ConstantType => tp.value.tpe case _ => this - } /** Dealias, and if result is a dependent function type, drop the `apply` refinement. 
*/ - final def dropDependentRefinement(using Context): Type = dealias match { + final def dropDependentRefinement(using Context): Type = dealias match case RefinedType(parent, nme.apply, mt) if defn.isNonRefinedFunction(parent) => parent case tp => tp - } /** The type constructor of an applied type, otherwise the type itself */ - final def typeConstructor(using Context): Type = this match { + final def typeConstructor(using Context): Type = this match case AppliedType(tycon, _) => tycon case _ => this - } /** If this is a (possibly aliased, annotated, and/or parameterized) reference to * a class, the class type ref, otherwise NoType. * @param refinementOK If `true` we also skip refinements. */ - def underlyingClassRef(refinementOK: Boolean)(using Context): Type = dealias match { + def underlyingClassRef(refinementOK: Boolean)(using Context): Type = dealias match case tp: TypeRef => if (tp.symbol.isClass) tp else if (tp.symbol.isAliasType) tp.underlying.underlyingClassRef(refinementOK) @@ -1514,21 +1426,18 @@ object Types { tp.underlying.underlyingClassRef(refinementOK) case _ => NoType - } /** The iterator of underlying types as long as type is a TypeProxy. * Useful for diagnostics */ - def underlyingIterator(using Context): Iterator[Type] = new Iterator[Type] { + def underlyingIterator(using Context): Iterator[Type] = new Iterator[Type]: var current = Type.this var hasNext = true - def next = { + def next = val res = current hasNext = current.isInstanceOf[TypeProxy] if (hasNext) current = current.asInstanceOf[TypeProxy].underlying res - } - } /** A prefix-less refined this or a termRef to a new skolem symbol * that has the given type as info. 
@@ -1539,16 +1448,14 @@ object Types { /** Useful for diagnostics: The underlying type if this type is a type proxy, * otherwise NoType */ - def underlyingIfProxy(using Context): Type = this match { + def underlyingIfProxy(using Context): Type = this match case this1: TypeProxy => this1.underlying case _ => NoType - } /** If this is a repeated type, its element type, otherwise the type itself */ - def repeatedToSingle(using Context): Type = this match { + def repeatedToSingle(using Context): Type = this match case tp @ ExprType(tp1) => tp.derivedExprType(tp1.repeatedToSingle) case _ => if (isRepeatedParam) this.argTypesHi.head else this - } /** The capture set of this type. Overridden and cached in CaptureRef */ def captureSet(using Context): CaptureSet = CaptureSet.ofType(this) @@ -1570,15 +1477,14 @@ object Types { * * (*) normalizes means: follow instantiated typevars and aliases. */ - def lookupRefined(name: Name)(using Context): Type = { - @tailrec def loop(pre: Type): Type = pre.stripTypeVar match { + def lookupRefined(name: Name)(using Context): Type = + @tailrec def loop(pre: Type): Type = pre.stripTypeVar match case pre: RefinedType => - pre.refinedInfo match { + pre.refinedInfo match case tp: AliasingBounds => if (pre.refinedName ne name) loop(pre.parent) else tp.alias case _ => loop(pre.parent) - } case pre: RecType => val candidate = pre.parent.lookupRefined(name) if (candidate.exists && !pre.isReferredToBy(candidate)) @@ -1588,16 +1494,13 @@ object Types { case SkolemType(tp) => loop(tp) case pre: TypeRef => - pre.info match { + pre.info match case TypeAlias(alias) => loop(alias) case _ => NoType - } case _ => NoType - } loop(this) - } /** The type , reduced if possible */ def select(name: Name)(using Context): Type = @@ -1625,7 +1528,7 @@ object Types { * Inherited by all other type proxies. * `NoType` for all other types. 
*/ - @tailrec final def normalizedPrefix(using Context): Type = this match { + @tailrec final def normalizedPrefix(using Context): Type = this match case tp: NamedType => if (tp.symbol.info.isTypeAlias) tp.info.normalizedPrefix else tp.prefix case tp: ClassInfo => @@ -1634,10 +1537,9 @@ object Types { tp.superType.normalizedPrefix case _ => NoType - } /** The full parent types, including all type arguments */ - def parents(using Context): List[Type] = this match { + def parents(using Context): List[Type] = this match case tp @ AppliedType(tycon, args) if tycon.typeSymbol.isClass => tycon.parents.map(_.subst(tycon.typeSymbol.typeParams, args)) case tp: TypeRef => @@ -1649,43 +1551,36 @@ object Types { case tp: TypeProxy => tp.superType.parents case _ => Nil - } /** The first parent of this type, AnyRef if list of parents is empty */ - def firstParent(using Context): Type = parents match { + def firstParent(using Context): Type = parents match case p :: _ => p case _ => defn.AnyType - } /** The parameter types of a PolyType or MethodType, Empty list for others */ - final def paramInfoss(using Context): List[List[Type]] = stripPoly match { + final def paramInfoss(using Context): List[List[Type]] = stripPoly match case mt: MethodType => mt.paramInfos :: mt.resultType.paramInfoss case _ => Nil - } /** The parameter names of a PolyType or MethodType, Empty list for others */ - final def paramNamess(using Context): List[List[TermName]] = stripPoly match { + final def paramNamess(using Context): List[List[TermName]] = stripPoly match case mt: MethodType => mt.paramNames :: mt.resultType.paramNamess case _ => Nil - } /** The parameter types in the first parameter section of a generic type or MethodType, Empty list for others */ - final def firstParamTypes(using Context): List[Type] = stripPoly match { + final def firstParamTypes(using Context): List[Type] = stripPoly match case mt: MethodType => mt.paramInfos case _ => Nil - } /** The parameter names in the first parameter 
section of a generic type or MethodType, Empty list for others */ - final def firstParamNames(using Context): List[TermName] = stripPoly match { + final def firstParamNames(using Context): List[TermName] = stripPoly match case mt: MethodType => mt.paramNames case _ => Nil - } /** Is this either not a method at all, or a parameterless method? */ - final def isParameterless(using Context): Boolean = stripPoly match { + final def isParameterless(using Context): Boolean = stripPoly match case mt: MethodType => false case _ => true - } /** Is this (an alias of) the `scala.Null` type? */ final def isNullType(using Context) = isRef(defn.NullClass) @@ -1699,10 +1594,9 @@ object Types { /** The final result type of a PolyType, MethodType, or ExprType, after skipping * all parameter sections, the type itself for all others. */ - def finalResultType(using Context): Type = resultType.stripPoly match { + def finalResultType(using Context): Type = resultType.stripPoly match case mt: MethodType => mt.resultType.finalResultType case _ => resultType - } /** Determine the expected function type from the prototype. * If no function type is found, NoType is returned. 
If multiple @@ -1732,28 +1626,24 @@ object Types { NoType /** This type seen as a TypeBounds */ - final def bounds(using Context): TypeBounds = this match { + final def bounds(using Context): TypeBounds = this match case tp: TypeBounds => tp case ci: ClassInfo => TypeAlias(ci.appliedRef) case wc: WildcardType => - wc.optBounds match { + wc.optBounds match case bounds: TypeBounds => bounds case NoType => TypeBounds.empty - } case _ => TypeAlias(this) - } /** The lower bound of a TypeBounds type, the type itself otherwise */ - def loBound: Type = this match { + def loBound: Type = this match case tp: TypeBounds => tp.lo case _ => this - } /** The upper bound of a TypeBounds type, the type itself otherwise */ - def hiBound: Type = this match { + def hiBound: Type = this match case tp: TypeBounds => tp.hi case _ => this - } /** The type parameter with given `name`. This tries first `decls` * in order not to provoke a cycle by forcing the info. If that yields @@ -1793,15 +1683,13 @@ object Types { */ final def subst(from: List[Symbol], to: List[Type])(using Context): Type = if (from.isEmpty) this - else { + else val from1 = from.tail if (from1.isEmpty) Substituters.subst1(this, from.head, to.head, null) - else { + else val from2 = from1.tail if (from2.isEmpty) Substituters.subst2(this, from.head, to.head, from1.head, to.tail.head, null) else Substituters.subst(this, from, to, null) - } - } /** Substitute all types of the form `TypeParamRef(from, N)` by * `TypeParamRef(to, N)`. @@ -1849,21 +1737,19 @@ object Types { * when forming the function type. * @param alwaysDependent if true, always create a dependent function type. 
*/ - def toFunctionType(isJava: Boolean, dropLast: Int = 0, alwaysDependent: Boolean = false)(using Context): Type = this match { + def toFunctionType(isJava: Boolean, dropLast: Int = 0, alwaysDependent: Boolean = false)(using Context): Type = this match case mt: MethodType if !mt.isParamDependent => val formals1 = if (dropLast == 0) mt.paramInfos else mt.paramInfos dropRight dropLast val isContextual = mt.isContextualMethod && !ctx.erasedTypes - val result1 = mt.nonDependentResultApprox match { + val result1 = mt.nonDependentResultApprox match case res: MethodType => res.toFunctionType(isJava) case res => res - } val funType = defn.FunctionOf( formals1 mapConserve (_.translateFromRepeated(toArray = isJava)), result1, isContextual) if alwaysDependent || mt.isResultDependent then RefinedType(funType, nme.apply, mt) else funType - } /** The signature of this type. This is by default NotAMethod, * but is overridden for PolyTypes, MethodTypes, and TermRef types. @@ -1874,13 +1760,12 @@ object Types { def signature(using Context): Signature = Signature.NotAMethod /** Drop annotation of given `cls` from this type */ - def dropAnnot(cls: Symbol)(using Context): Type = stripTypeVar match { + def dropAnnot(cls: Symbol)(using Context): Type = stripTypeVar match case self @ AnnotatedType(pre, annot) => if (annot.symbol eq cls) pre else self.derivedAnnotatedType(pre.dropAnnot(cls), annot) case _ => this - } def dropRepeatedAnnot(using Context): Type = dropAnnot(defn.RepeatedAnnot) @@ -1889,7 +1774,7 @@ object Types { * Translates `Seq[T] @repeated` or `Array[T] @repeated` to `[T]`. * That way, repeated arguments are made manifest without risk of dropped annotations. 
*/ - def annotatedToRepeated(using Context): Type = this match { + def annotatedToRepeated(using Context): Type = this match case tp @ ExprType(tp1) => tp.derivedExprType(tp1.annotatedToRepeated) case self @ AnnotatedType(tp, annot) if annot matches defn.RetainsByNameAnnot => @@ -1899,7 +1784,6 @@ object Types { assert(typeSym == defn.SeqClass || typeSym == defn.ArrayClass) tp.translateParameterized(typeSym, defn.RepeatedParamClass) case _ => this - } /** A type capturing `ref` */ def capturing(ref: CaptureRef)(using Context): Type = @@ -1929,10 +1813,9 @@ object Types { /** Utility method to show the underlying type of a TypeProxy chain together * with the proxy type itself. */ - def showWithUnderlying(n: Int = 1)(using Context): String = this match { + def showWithUnderlying(n: Int = 1)(using Context): String = this match case tp: TypeProxy if n > 0 => s"$show with underlying ${tp.underlying.showWithUnderlying(n - 1)}" case _ => show - } /** A simplified version of this type which is equivalent wrt =:= to this type. * This applies a typemap to the type which (as all typemaps) follows type @@ -1983,7 +1866,6 @@ object Types { /** Is the `hash` of this type the same for all possible sequences of enclosing binders? */ def hashIsStable: Boolean = true - } // end Type @@ -1995,7 +1877,7 @@ object Types { /** A marker trait for type proxies. * Each implementation is expected to redefine the `underlying` method. */ - abstract class TypeProxy extends Type { + abstract class TypeProxy extends Type: /** The type to which this proxy forwards operations. */ def underlying(using Context): Type @@ -2005,10 +1887,9 @@ object Types { * - instead of a TyperBounds type it returns its upper bound, and * - for applied types it returns the upper bound of the constructor re-applied to the arguments. 
*/ - def superType(using Context): Type = underlying match { + def superType(using Context): Type = underlying match case TypeBounds(_, hi) => hi case st => st - } def superTypeNormalized(using Context): Type = superType.normalized @@ -2022,7 +1903,6 @@ object Types { * the very specific cache invalidation condition for matchtypes). */ def translucentSuperType(using Context): Type = superType - } // Every type has to inherit one of the following four abstract type classes., // which determine whether the type is cached, and whether @@ -2030,52 +1910,42 @@ object Types { // is for efficiency. /** Instances of this class are cached and are not proxies. */ - abstract class CachedGroundType extends Type with CachedType { + abstract class CachedGroundType extends Type with CachedType: private var myHash = HashUnknown - final def hash: Int = { - if (myHash == HashUnknown) { + final def hash: Int = + if (myHash == HashUnknown) myHash = computeHash(null) assert(myHash != HashUnknown) - } myHash - } override final def hashCode: Int = if (hash == NotCached) System.identityHashCode(this) else hash - } /** Instances of this class are cached and are proxies. */ - abstract class CachedProxyType extends TypeProxy with CachedType { + abstract class CachedProxyType extends TypeProxy with CachedType: protected var myHash: Int = HashUnknown - final def hash: Int = { - if (myHash == HashUnknown) { + final def hash: Int = + if (myHash == HashUnknown) myHash = computeHash(null) assert(myHash != HashUnknown) - } myHash - } override final def hashCode: Int = if (hash == NotCached) System.identityHashCode(this) else hash - } /** Instances of this class are uncached and are not proxies. 
*/ - abstract class UncachedGroundType extends Type { + abstract class UncachedGroundType extends Type: final def hash: Int = NotCached final def computeHash(bs: Binders): Int = NotCached - if (monitored) { + if (monitored) record(s"uncachable") record(s"uncachable: $getClass") - } - } /** Instances of this class are uncached and are proxies. */ - abstract class UncachedProxyType extends TypeProxy { + abstract class UncachedProxyType extends TypeProxy: final def hash: Int = NotCached final def computeHash(bs: Binders): Int = NotCached - if (monitored) { + if (monitored) record(s"uncachable") record(s"uncachable: $getClass") - } - } /** A marker trait for types that apply only to type symbols */ trait TypeType extends Type @@ -2094,9 +1964,8 @@ object Types { /** A marker trait for types that are guaranteed to contain only a * single non-null value (they might contain null in addition). */ - trait SingletonType extends TypeProxy with ValueType { + trait SingletonType extends TypeProxy with ValueType: def isOverloaded(using Context): Boolean = false - } /** A trait for references in CaptureSets. These can be NamedTypes, ThisTypes or ParamRefs */ trait CaptureRef extends SingletonType: @@ -2151,12 +2020,12 @@ object Types { /** A trait for types that bind other types that refer to them. * Instances are: LambdaType, RecType. */ - trait BindingType extends Type { + trait BindingType extends Type: /** If this type is in `bs`, a hashcode based on its position in `bs`. * Otherise the standard identity hash. 
*/ - override def identityHash(bs: Binders): Int = { + override def identityHash(bs: Binders): Int = def recur(n: Int, tp: BindingType, rest: Binders): Int = if (this `eq` tp) finishHash(hashing.mix(hashSeed, n), 1) else if (rest == null) System.identityHashCode(this) @@ -2164,14 +2033,12 @@ object Types { avoidSpecialHashes( if (bs == null) System.identityHashCode(this) else recur(1, bs.tp, bs.next)) - } def equalBinder(that: BindingType, bs: BinderPairs): Boolean = (this `eq` that) || bs != null && bs.matches(this, that) - } /** A trait for proto-types, used as expected types in typer */ - trait ProtoType extends Type { + trait ProtoType extends Type: def isMatchedBy(tp: Type, keepConstraint: Boolean = false)(using Context): Boolean def fold[T](x: T, ta: TypeAccumulator[T])(using Context): T def map(tm: TypeMap)(using Context): ProtoType @@ -2182,16 +2049,13 @@ object Types { def withContext(ctx: Context): ProtoType = this override def dropIfProto = WildcardType - } /** Implementations of this trait cache the results of `narrow`. */ - trait NarrowCached extends Type { + trait NarrowCached extends Type: private var myNarrow: TermRef | Null = null - override def narrow(using Context): TermRef = { + override def narrow(using Context): TermRef = if (myNarrow == null) myNarrow = super.narrow myNarrow.nn - } - } // --- NamedTypes ------------------------------------------------------------------ @@ -2227,15 +2091,13 @@ object Types { /** If designator is a name, this name. Otherwise, the original name * of the designator symbol. 
*/ - final def name(using Context): ThisName = { + final def name(using Context): ThisName = if (myName == null) myName = computeName myName.asInstanceOf[ThisName] - } - private def computeName: Name = designator match { + private def computeName: Name = designator match case name: Name => name case sym: Symbol => sym.originDenotation.name - } final override def signature(using Context): Signature = /** The signature computed from the last known denotation with `sigFromDenot`, @@ -2308,10 +2170,9 @@ object Types { * type accumulators, as well as to be safe in diagnostic printing. * Normally, it's better to use `symbol`, not `currentSymbol`. */ - final def currentSymbol(using Context): Symbol = designator match { + final def currentSymbol(using Context): Symbol = designator match case sym: Symbol => sym case _ => if (denotationIsCurrent) lastDenotation.nn.symbol else NoSymbol - } /** Retrieves currently valid symbol without necessarily updating denotation. * Assumes that symbols do not change between periods in the same run. @@ -2334,10 +2195,10 @@ object Types { if checkedPeriod.code != NowhereCode && lastd.validFor.contains(ctx.period) then lastd else computeDenot - private def computeDenot(using Context): Denotation = { + private def computeDenot(using Context): Denotation = util.Stats.record("NamedType.computeDenot") - def finish(d: Denotation) = { + def finish(d: Denotation) = if (d.exists) // Avoid storing NoDenotations in the cache - we will not be able to recover from // them. The situation might arise that a type has NoDenotation in some later @@ -2346,9 +2207,8 @@ object Types { // forth also in these cases. 
setDenot(d) d - } - def fromDesignator = designator match { + def fromDesignator = designator match case name: Name => val sym = lastSymbol val allowPrivate = sym == null || (sym == NoSymbol) || sym.lastKnownDenotation.flagsUNSAFE.is(Private) @@ -2363,40 +2223,34 @@ object Types { finish(memberDenot(symd.initial.name, allowPrivate = symd.is(Private))) else finish(symd.current) - } - lastDenotation match { + lastDenotation match case lastd0: SingleDenotation => val lastd = lastd0.skipRemoved if lastd.validFor.runId == ctx.runId && checkedPeriod.code != NowhereCode then finish(lastd.current) - else lastd match { + else lastd match case lastd: SymDenotation => if stillValid(lastd) && checkedPeriod.code != NowhereCode then finish(lastd.current) else finish(memberDenot(lastd.initial.name, allowPrivate = false)) case _ => fromDesignator - } case _ => fromDesignator - } - } private def disambiguate(d: Denotation)(using Context): Denotation = disambiguate(d, currentSignature, currentSymbol.targetName) private def disambiguate(d: Denotation, sig: Signature | Null, target: Name)(using Context): Denotation = if (sig != null) - d.atSignature(sig, target, relaxed = !ctx.erasedTypes) match { + d.atSignature(sig, target, relaxed = !ctx.erasedTypes) match case d1: SingleDenotation => d1 case d1 => - d1.atSignature(sig, target, relaxed = false) match { + d1.atSignature(sig, target, relaxed = false) match case d2: SingleDenotation => d2 case d2 => d2.suchThat(currentSymbol.eq).orElse(d2) - } - } else d - private def memberDenot(name: Name, allowPrivate: Boolean)(using Context): Denotation = { + private def memberDenot(name: Name, allowPrivate: Boolean)(using Context): Denotation = var d = memberDenot(prefix, name, allowPrivate) if (!d.exists && !allowPrivate && ctx.mode.is(Mode.Interactive)) // In the IDE we might change a public symbol to private, and would still expect to find it. 
@@ -2407,28 +2261,26 @@ object Types { if (d.isOverloaded) d = disambiguate(d) d - } private def memberDenot(prefix: Type, name: Name, allowPrivate: Boolean)(using Context): Denotation = if (allowPrivate) prefix.member(name) else prefix.nonPrivateMember(name) - private def argDenot(param: TypeSymbol)(using Context): Denotation = { + private def argDenot(param: TypeSymbol)(using Context): Denotation = val cls = param.owner val args = prefix.baseType(cls).argInfos val typeParams = cls.typeParams - def concretize(arg: Type, tparam: TypeSymbol) = arg match { + def concretize(arg: Type, tparam: TypeSymbol) = arg match case arg: TypeBounds => TypeRef(prefix, tparam) case arg => arg - } val concretized = args.zipWithConserve(typeParams)(concretize) def rebase(arg: Type) = arg.subst(typeParams, concretized) val idx = typeParams.indexOf(param) - if (0 <= idx && idx < args.length) { - val argInfo = args(idx) match { + if (0 <= idx && idx < args.length) + val argInfo = args(idx) match case arg: TypeBounds => val v = param.paramVarianceSign val pbounds = param.paramInfo @@ -2436,10 +2288,8 @@ object Types { else if (v < 0 && pbounds.hiBound.dealiasKeepAnnots.isExactlyAny) TypeAlias(arg.loBound | rebase(pbounds.loBound)) else arg recoverable_& rebase(pbounds) case arg => TypeAlias(arg) - } param.derivedSingleDenotation(param, argInfo) - } - else { + else if (!ctx.reporter.errorsReported) throw TypeError( em"""bad parameter reference $this at ${ctx.phase} @@ -2448,8 +2298,6 @@ object Types { |idx = $idx, args = $args%, %, |constraint = ${ctx.typerState.constraint}""") NoDenotation - } - } /** Reload denotation by computing the member with the reference's name as seen * from the reference's prefix. 
@@ -2457,26 +2305,24 @@ object Types { def recomputeDenot()(using Context): Unit = setDenot(memberDenot(name, allowPrivate = !symbol.exists || symbol.is(Private))) - private def setDenot(denot: Denotation)(using Context): Unit = { + private def setDenot(denot: Denotation)(using Context): Unit = if ctx.base.checkNoDoubleBindings then checkSymAssign(denot.symbol) lastDenotation = denot lastSymbol = denot.symbol checkedPeriod = if (prefix.isProvisional) Nowhere else ctx.period - designator match { + designator match case sym: Symbol if designator ne lastSymbol.nn => designator = lastSymbol.asInstanceOf[Designator{ type ThisName = self.ThisName }] case _ => - } checkDenot() - } private def checkDenot()(using Context) = {} //if name.toString == "getConstructor" then // println(i"set denot of $this to ${denot.info}, ${denot.getClass}, ${Phases.phaseOf(denot.validFor.lastPhaseId)} at ${ctx.phase}") - private def checkSymAssign(sym: Symbol)(using Context) = { + private def checkSymAssign(sym: Symbol)(using Context) = def selfTypeOf(sym: Symbol) = if (sym.isClass) sym.asClass.givenSelfType else NoType val lastSym = lastSymbol @@ -2512,7 +2358,6 @@ object Types { |last sym id = ${lastSym.id}, new sym id = ${sym.id}, |last owner = ${lastSym.owner}, new owner = ${sym.owner}, |period = ${ctx.phase} at run ${ctx.runId}""" }) - } /** A reference with the initial symbol in `symd` has an info that * might depend on the given prefix. @@ -2562,20 +2407,18 @@ object Types { /** Is this a reference to a class or object member with an info that might depend * on the prefix? */ - def isPrefixDependentMemberRef(using Context): Boolean = designator match { + def isPrefixDependentMemberRef(using Context): Boolean = designator match case sym: Symbol => infoDependsOnPrefix(sym, prefix) case _ => true - } /** Reduce a type-ref `T { X = U; ... } # X` to `U` * provided `U` does not refer with a RecThis to the * refinement type `T { X = U; ... 
}` */ def reduceProjection(using Context): Type = - if (isType) { + if (isType) val reduced = prefix.lookupRefined(name) if (reduced.exists) reduced else this - } else this /** Guard against cycles that can arise if given `op` @@ -2588,20 +2431,18 @@ object Types { * These are errors but we have to make sure that operations do * not loop before the error is detected. */ - final def controlled[T](op: => T)(using Context): T = try { + final def controlled[T](op: => T)(using Context): T = try ctx.base.underlyingRecursions += 1 if (ctx.base.underlyingRecursions < Config.LogPendingUnderlyingThreshold) op else if (ctx.pendingUnderlying contains this) throw CyclicReference(symbol) else - try { + try ctx.pendingUnderlying += this op - } finally ctx.pendingUnderlying -= this - } finally ctx.base.underlyingRecursions -= 1 @@ -2611,42 +2452,37 @@ object Types { * Otherwise, a typebounds argument is dropped and the original type parameter * reference is returned. */ - def argForParam(pre: Type, widenAbstract: Boolean = false)(using Context): Type = { + def argForParam(pre: Type, widenAbstract: Boolean = false)(using Context): Type = val tparam = symbol val cls = tparam.owner val base = pre.baseType(cls) - base.stripped match { + base.stripped match case AppliedType(tycon, allArgs) => var tparams = cls.typeParams var args = allArgs var idx = 0 - while (tparams.nonEmpty && args.nonEmpty) { + while (tparams.nonEmpty && args.nonEmpty) if (tparams.head.eq(tparam)) - return args.head match { + return args.head match case _: TypeBounds if !widenAbstract => TypeRef(pre, tparam) case arg => arg.boxedUnlessFun(tycon) - } tparams = tparams.tail args = args.tail idx += 1 - } NoType case base: AndOrType => var tp1 = argForParam(base.tp1) var tp2 = argForParam(base.tp2) val variance = tparam.paramVarianceSign - if (isBounds(tp1) || isBounds(tp2) || variance == 0) { + if (isBounds(tp1) || isBounds(tp2) || variance == 0) // compute argument as a type bounds instead of a point type tp1 = 
tp1.bounds tp2 = tp2.bounds - } if (base.isAnd == variance >= 0) tp1 & tp2 else tp1 | tp2 case _ => if (pre.termSymbol.is(Package)) argForParam(pre.select(nme.PACKAGE)) else if (pre.isExactlyNothing) pre else NoType - } - } /** A selection of the same kind, but with potentially a different prefix. * The following normalizations are performed for type selections T#A: @@ -2663,19 +2499,18 @@ object Types { def derivedSelect(prefix: Type)(using Context): Type = if (prefix eq this.prefix) this else if (prefix.isExactlyNothing) prefix - else { - if (isType) { + else + if (isType) val res = if (currentSymbol.isAllOf(ClassTypeParam)) argForParam(prefix) else prefix.lookupRefined(name) if (res.exists) return res if (Config.splitProjections) - prefix match { + prefix match case prefix: AndType => - def isMissing(tp: Type) = tp match { + def isMissing(tp: Type) = tp match case tp: TypeRef => !tp.info.exists case _ => false - } val derived1 = derivedSelect(prefix.tp1) val derived2 = derivedSelect(prefix.tp2) return ( @@ -2687,11 +2522,8 @@ object Types { val derived2 = derivedSelect(prefix.tp2) return prefix.derivedOrType(derived1, derived2) case _ => - } - } if (prefix.isInstanceOf[WildcardType]) WildcardType.sameKindAs(this) else withPrefix(prefix) - } /** A reference like this one, but with the given symbol, if it exists */ private def withSym(sym: Symbol)(using Context): ThisType = @@ -2738,8 +2570,8 @@ object Types { this /** A reference like this one, but with the given prefix. 
*/ - final def withPrefix(prefix: Type)(using Context): Type = { - def reload(): NamedType = { + final def withPrefix(prefix: Type)(using Context): Type = + def reload(): NamedType = val lastSym = lastSymbol.nn val allowPrivate = !lastSym.exists || lastSym.is(Private) var d = memberDenot(prefix, name, allowPrivate) @@ -2749,13 +2581,12 @@ object Types { else lastSym.asSeenFrom(prefix).signature, lastSym.targetName) NamedType(prefix, name, d) - } if (prefix eq this.prefix) this else if !NamedType.validPrefix(prefix) then UnspecifiedErrorType else if (lastDenotation == null) NamedType(prefix, designator) - else designator match { + else designator match case sym: Symbol => - if (infoDependsOnPrefix(sym, prefix) && !prefix.isArgPrefixOf(sym)) { + if (infoDependsOnPrefix(sym, prefix) && !prefix.isArgPrefixOf(sym)) val candidate = reload() val falseOverride = sym.isClass && candidate.symbol.exists && candidate.symbol != symbol // A false override happens if we rebind an inner class to another type with the same name @@ -2763,28 +2594,23 @@ object Types { // return a type with the existing class info as seen from the new prefix instead. 
if (falseOverride) NamedType(prefix, sym.name, denot.asSeenFrom(prefix)) else candidate - } else NamedType(prefix, sym) case name: Name => reload() - } - } override def equals(that: Any): Boolean = equals(that, null) - override def iso(that: Any, bs: BinderPairs): Boolean = that match { + override def iso(that: Any, bs: BinderPairs): Boolean = that match case that: NamedType => designator.equals(that.designator) && prefix.equals(that.prefix, bs) case _ => false - } override def computeHash(bs: Binders): Int = doHash(bs, designator, prefix) - override def hashIsStable: Boolean = { + override def hashIsStable: Boolean = if (myStableHash == 0) myStableHash = if (prefix.hashIsStable) 1 else -1 myStableHash > 0 - } override def eql(that: Type): Boolean = this eq that // safe because named types are hash-consed separately } @@ -2792,16 +2618,15 @@ object Types { /** A reference to an implicit definition. This can be either a TermRef or a * Implicits.RenamedImplicitRef. */ - trait ImplicitRef { + trait ImplicitRef: def implicitName(using Context): TermName def underlyingRef: TermRef - } /** The singleton type for path prefix#myDesignator. 
*/ abstract case class TermRef(override val prefix: Type, private var myDesignator: Designator) - extends NamedType, ImplicitRef, CaptureRef { + extends NamedType, ImplicitRef, CaptureRef: type ThisType = TermRef type ThisName = TermName @@ -2810,10 +2635,9 @@ object Types { override protected def designator_=(d: Designator): Unit = myDesignator = d //assert(name.toString != "") - override def underlying(using Context): Type = { + override def underlying(using Context): Type = val d = denot if (d.isOverloaded) NoType else d.info - } override def isOverloaded(using Context): Boolean = denot.isOverloaded @@ -2843,11 +2667,10 @@ object Types { override def normalizedRef(using Context): CaptureRef = if canBeTracked then symbol.termRef else this - } abstract case class TypeRef(override val prefix: Type, private var myDesignator: Designator) - extends NamedType { + extends NamedType: type ThisType = TypeRef type ThisName = TypeName @@ -2884,29 +2707,25 @@ object Types { override def underlying(using Context): Type = info - override def translucentSuperType(using Context) = info match { + override def translucentSuperType(using Context) = info match case TypeAlias(aliased) => aliased case TypeBounds(_, hi) => if (symbol.isOpaqueAlias) symbol.opaqueAlias.asSeenFrom(prefix, symbol.owner).orElse(hi) // orElse can happen for malformed input else hi case _ => underlying - } /** Hook that can be called from creation methods in TermRef and TypeRef */ def validated(using Context): this.type = this - } - final class CachedTermRef(prefix: Type, designator: Designator, hc: Int) extends TermRef(prefix, designator) { + final class CachedTermRef(prefix: Type, designator: Designator, hc: Int) extends TermRef(prefix, designator): assert((prefix ne NoPrefix) || designator.isInstanceOf[Symbol]) myHash = hc - } - final class CachedTypeRef(prefix: Type, designator: Designator, hc: Int) extends TypeRef(prefix, designator) { + final class CachedTypeRef(prefix: Type, designator: Designator, hc: 
Int) extends TypeRef(prefix, designator): assert((prefix ne NoPrefix) || designator.isInstanceOf[Symbol]) myHash = hc - } /** Assert current phase does not have erasure semantics */ private def assertUnerased()(using Context) = @@ -2921,7 +2740,7 @@ object Types { * a reference with a name as designator so that the denotation will be correctly updated in * the future. See also NamedType#withDenot. Test case is neg/opaque-self-encoding.scala. */ - private def designatorFor(prefix: Type, name: Name, denot: Denotation)(using Context): Designator = { + private def designatorFor(prefix: Type, name: Name, denot: Denotation)(using Context): Designator = def ownerIsPrefix(owner: Symbol) = prefix match case prefix: ThisType => prefix.sameThis(owner.thisType) case _ => false @@ -2930,13 +2749,11 @@ object Types { sym else name - } - object NamedType { - def isType(desig: Designator)(using Context): Boolean = desig match { + object NamedType: + def isType(desig: Designator)(using Context): Boolean = desig match case sym: Symbol => sym.isType case name: Name => name.isTypeName - } def apply(prefix: Type, designator: Designator)(using Context): NamedType = if (isType(designator)) TypeRef.apply(prefix, designator) else TermRef.apply(prefix, designator) @@ -2946,9 +2763,8 @@ object Types { def unapply(tp: NamedType): NamedType = tp def validPrefix(prefix: Type): Boolean = prefix.isValueType || (prefix eq NoPrefix) - } - object TermRef { + object TermRef: /** Create a term ref with given designator */ def apply(prefix: Type, desig: Designator)(using Context): TermRef = @@ -2959,9 +2775,8 @@ object Types { */ def apply(prefix: Type, name: TermName, denot: Denotation)(using Context): TermRef = apply(prefix, designatorFor(prefix, name, denot)).withDenot(denot) - } - object TypeRef { + object TypeRef: /** Create a type ref with given prefix and name */ def apply(prefix: Type, desig: Designator)(using Context): TypeRef = @@ -2972,7 +2787,6 @@ object Types { */ def apply(prefix: Type, 
name: TypeName, denot: Denotation)(using Context): TypeRef = apply(prefix, designatorFor(prefix, name, denot)).withDenot(denot) - } // --- Other SingletonTypes: ThisType/SuperType/ConstantType --------------------------- @@ -2981,49 +2795,44 @@ object Types { * Note: we do not pass a class symbol directly, because symbols * do not survive runs whereas typerefs do. */ - abstract case class ThisType(tref: TypeRef) extends CachedProxyType, CaptureRef { - def cls(using Context): ClassSymbol = tref.stableInRunSymbol match { + abstract case class ThisType(tref: TypeRef) extends CachedProxyType, CaptureRef: + def cls(using Context): ClassSymbol = tref.stableInRunSymbol match case cls: ClassSymbol => cls case _ if ctx.mode.is(Mode.Interactive) => defn.AnyClass // was observed to happen in IDE mode - } override def underlying(using Context): Type = if (ctx.erasedTypes) tref - else cls.info match { + else cls.info match case cinfo: ClassInfo => cinfo.selfType case _: ErrorType | NoType if ctx.mode.is(Mode.Interactive) => cls.info // can happen in IDE if `cls` is stale - } def canBeTracked(using Context) = true override def computeHash(bs: Binders): Int = doHash(bs, tref) - override def eql(that: Type): Boolean = that match { + override def eql(that: Type): Boolean = that match case that: ThisType => tref.eq(that.tref) case _ => false - } /** Check that the rhs is a ThisType that refers to the same class. */ def sameThis(that: Type)(using Context): Boolean = (that eq this) || that.match case that: ThisType => this.cls eq that.cls case _ => false - } final class CachedThisType(tref: TypeRef) extends ThisType(tref) - object ThisType { + object ThisType: /** Normally one should use ClassSymbol#thisType instead */ def raw(tref: TypeRef)(using Context): CachedThisType = unique(new CachedThisType(tref)) - } /** The type of a super reference cls.super where * `thistpe` is cls.this and `supertpe` is the type of the value referenced * by `super`. 
*/ - abstract case class SuperType(thistpe: Type, supertpe: Type) extends CachedProxyType with SingletonType { + abstract case class SuperType(thistpe: Type, supertpe: Type) extends CachedProxyType with SingletonType: override def underlying(using Context): Type = supertpe override def superType(using Context): Type = if supertpe.typeSymbol.exists then thistpe.baseType(supertpe.typeSymbol) @@ -3034,39 +2843,32 @@ object Types { override def computeHash(bs: Binders): Int = doHash(bs, thistpe, supertpe) - override def eql(that: Type): Boolean = that match { + override def eql(that: Type): Boolean = that match case that: SuperType => thistpe.eq(that.thistpe) && supertpe.eq(that.supertpe) case _ => false - } - } final class CachedSuperType(thistpe: Type, supertpe: Type) extends SuperType(thistpe, supertpe) - object SuperType { - def apply(thistpe: Type, supertpe: Type)(using Context): SuperType = { + object SuperType: + def apply(thistpe: Type, supertpe: Type)(using Context): SuperType = assert(thistpe != NoPrefix) unique(new CachedSuperType(thistpe, supertpe)) - } - } /** A constant type with single `value`. */ - abstract case class ConstantType(value: Constant) extends CachedProxyType with SingletonType { + abstract case class ConstantType(value: Constant) extends CachedProxyType with SingletonType: override def underlying(using Context): Type = value.tpe override def computeHash(bs: Binders): Int = doHash(value) - } final class CachedConstantType(value: Constant) extends ConstantType(value) - object ConstantType { - def apply(value: Constant)(using Context): ConstantType = { + object ConstantType: + def apply(value: Constant)(using Context): ConstantType = assertUnerased() unique(new CachedConstantType(value)) - } - } // `refFn` can be null only if `computed` is true. 
- case class LazyRef(private var refFn: (Context => (Type | Null)) | Null) extends UncachedProxyType with ValueType { + case class LazyRef(private var refFn: (Context => (Type | Null)) | Null) extends UncachedProxyType with ValueType: private var myRef: Type | Null = null private var computed = false @@ -3104,22 +2906,20 @@ object Types { override def toString: String = s"LazyRef(${if (computed) myRef else "..."})" override def equals(other: Any): Boolean = this.eq(other.asInstanceOf[AnyRef]) override def hashCode: Int = System.identityHashCode(this) - } object LazyRef: def of(refFn: Context ?=> (Type | Null)): LazyRef = LazyRef(refFn(using _)) // --- Refined Type and RecType ------------------------------------------------ - abstract class RefinedOrRecType extends CachedProxyType with ValueType { + abstract class RefinedOrRecType extends CachedProxyType with ValueType: def parent: Type - } /** A refined type parent { refinement } * @param parent The type being refined * @param refinedName The name of the refinement declaration * @param refinedInfo The info of the refinement declaration */ - abstract case class RefinedType(parent: Type, refinedName: Name, refinedInfo: Type) extends RefinedOrRecType { + abstract case class RefinedType(parent: Type, refinedName: Name, refinedInfo: Type) extends RefinedOrRecType: if (refinedName.isTermName) assert(refinedInfo.isInstanceOf[TermType]) else assert(refinedInfo.isInstanceOf[TypeType], this) @@ -3144,38 +2944,33 @@ object Types { override def computeHash(bs: Binders): Int = doHash(bs, refinedName, refinedInfo, parent) override def hashIsStable: Boolean = refinedInfo.hashIsStable && parent.hashIsStable - override def eql(that: Type): Boolean = that match { + override def eql(that: Type): Boolean = that match case that: RefinedType => refinedName.eq(that.refinedName) && refinedInfo.eq(that.refinedInfo) && parent.eq(that.parent) case _ => false - } // equals comes from case class; no matching override is needed - override def 
iso(that: Any, bs: BinderPairs): Boolean = that match { + override def iso(that: Any, bs: BinderPairs): Boolean = that match case that: RefinedType => refinedName.eq(that.refinedName) && refinedInfo.equals(that.refinedInfo, bs) && parent.equals(that.parent, bs) case _ => false - } - } class CachedRefinedType(parent: Type, refinedName: Name, refinedInfo: Type) extends RefinedType(parent, refinedName, refinedInfo) - object RefinedType { + object RefinedType: @tailrec def make(parent: Type, names: List[Name], infos: List[Type])(using Context): Type = if (names.isEmpty) parent else make(RefinedType(parent, names.head, infos.head), names.tail, infos.tail) - def apply(parent: Type, name: Name, info: Type)(using Context): RefinedType = { + def apply(parent: Type, name: Name, info: Type)(using Context): RefinedType = assert(!ctx.erasedTypes) unique(new CachedRefinedType(parent, name, info)).checkInst - } - } /** A recursive type. Instances should be constructed via the companion object. * @@ -3202,7 +2997,7 @@ object Types { * * Where `RecThis(...)` points back to the enclosing `RecType`. 
*/ - class RecType(parentExp: RecType => Type) extends RefinedOrRecType with BindingType { + class RecType(parentExp: RecType => Type) extends RefinedOrRecType with BindingType: // See discussion in findMember#goRec why this field is needed private[Types] var openedWithPrefix: Type = NoType @@ -3211,10 +3006,9 @@ object Types { private var myRecThis: RecThis | Null = null - def recThis: RecThis = { + def recThis: RecThis = if (myRecThis == null) myRecThis = new RecThisImpl(this) myRecThis.nn - } override def underlying(using Context): Type = parent @@ -3226,19 +3020,15 @@ object Types { if (parent eq this.parent) this else RecType.closeOver(rt => parent.substRecThis(this, rt.recThis)) - def isReferredToBy(tp: Type)(using Context): Boolean = { - val refacc = new TypeAccumulator[Boolean] { - override def apply(x: Boolean, tp: Type) = x || { - tp match { + def isReferredToBy(tp: Type)(using Context): Boolean = + val refacc = new TypeAccumulator[Boolean]: + override def apply(x: Boolean, tp: Type) = x `||`: + tp match case tp: TypeRef => apply(x, tp.prefix) case tp: RecThis => RecType.this eq tp.binder case tp: LazyRef => this(x, tp.ref) case _ => foldOver(x, tp) - } - } - } refacc.apply(false, tp) - } override def computeHash(bs: Binders): Int = doHash(new SomeBinders(this, bs), parent) @@ -3251,18 +3041,16 @@ object Types { override def equals(that: Any): Boolean = equals(that, null) - override def iso(that: Any, bs: BinderPairs): Boolean = that match { + override def iso(that: Any, bs: BinderPairs): Boolean = that match case that: RecType => parent.equals(that.parent, new SomeBinderPairs(this, that, bs)) case _ => false - } override def toString: String = s"RecType($parent | $hashCode)" private def checkInst(using Context): this.type = this // debug hook - } - object RecType { + object RecType: /** Create a RecType, normalizing its contents. 
This means: * @@ -3274,33 +3062,28 @@ object Types { * TODO: Figure out how to guarantee absence of cycles * of length > 1 */ - def apply(parentExp: RecType => Type)(using Context): RecType = { + def apply(parentExp: RecType => Type)(using Context): RecType = val rt = new RecType(parentExp) - def normalize(tp: Type): Type = tp.stripTypeVar match { + def normalize(tp: Type): Type = tp.stripTypeVar match case tp: RecType => normalize(tp.parent.substRecThis(tp, rt.recThis)) case tp @ RefinedType(parent, rname, rinfo) => - val rinfo1 = rinfo match { + val rinfo1 = rinfo match case TypeAlias(ref @ TypeRef(RecThis(`rt`), _)) if ref.name == rname => TypeBounds.empty case _ => rinfo - } tp.derivedRefinedType(normalize(parent), rname, rinfo1) case tp => tp - } unique(rt.derivedRecType(normalize(rt.parent))).checkInst - } /** Create a `RecType`, but only if the type generated by `parentExp` is indeed recursive. */ - def closeOver(parentExp: RecType => Type)(using Context): Type = { + def closeOver(parentExp: RecType => Type)(using Context): Type = val rt = this(parentExp) if (rt.isReferredToBy(rt.parent)) rt else rt.parent - } - } // --- AndType/OrType --------------------------------------------------------------- - abstract class AndOrType extends CachedGroundType with ValueType { + abstract class AndOrType extends CachedGroundType with ValueType: def isAnd: Boolean def tp1: Type def tp2: Type @@ -3310,30 +3093,26 @@ object Types { else this match case tp: OrType => OrType.make(tp1, tp2, tp.isSoft) case tp: AndType => AndType.make(tp1, tp2, checkValid = true) - } - abstract case class AndType(tp1: Type, tp2: Type) extends AndOrType { + abstract case class AndType(tp1: Type, tp2: Type) extends AndOrType: def isAnd: Boolean = true private var myBaseClassesPeriod: Period = Nowhere private var myBaseClasses: List[ClassSymbol] = _ /** Base classes of are the merge of the operand base classes. 
*/ - override final def baseClasses(using Context): List[ClassSymbol] = { - if (myBaseClassesPeriod != ctx.period) { + override final def baseClasses(using Context): List[ClassSymbol] = + if (myBaseClassesPeriod != ctx.period) val bcs1 = tp1.baseClasses val bcs1set = BaseClassSet(bcs1) - def recur(bcs2: List[ClassSymbol]): List[ClassSymbol] = bcs2 match { + def recur(bcs2: List[ClassSymbol]): List[ClassSymbol] = bcs2 match case bc2 :: bcs2rest => if (bcs1set contains bc2) if (bc2.is(Trait)) recur(bcs2rest) else bcs1 // common class, therefore rest is the same in both sequences else bc2 :: recur(bcs2rest) case nil => bcs1 - } myBaseClasses = recur(tp2.baseClasses) myBaseClassesPeriod = ctx.period - } myBaseClasses - } private var myFactorCount = 0 override def andFactorCount = @@ -3351,15 +3130,13 @@ object Types { override def computeHash(bs: Binders): Int = doHash(bs, tp1, tp2) - override def eql(that: Type): Boolean = that match { + override def eql(that: Type): Boolean = that match case that: AndType => tp1.eq(that.tp1) && tp2.eq(that.tp2) case _ => false - } - } final class CachedAndType(tp1: Type, tp2: Type) extends AndType(tp1, tp2) - object AndType { + object AndType: def apply(tp1: Type, tp2: Type)(using Context): AndType = def where = i"in intersection $tp1 & $tp2" expectValueTypeOrWildcard(tp1, where) @@ -3383,10 +3160,9 @@ object Types { case _ => apply(tp1, tp2) - def unchecked(tp1: Type, tp2: Type)(using Context): AndType = { + def unchecked(tp1: Type, tp2: Type)(using Context): AndType = assertUnerased() unique(new CachedAndType(tp1, tp2)) - } /** Make an AndType using `op` unless clearly unnecessary (i.e. without * going through `&`). 
@@ -3402,19 +3178,18 @@ object Types { /** Like `make`, but also supports higher-kinded types as argument */ def makeHk(tp1: Type, tp2: Type)(using Context): Type = TypeComparer.liftIfHK(tp1, tp2, AndType.make(_, _, checkValid = false), makeHk, _ | _) - } - abstract case class OrType protected(tp1: Type, tp2: Type) extends AndOrType { + abstract case class OrType protected(tp1: Type, tp2: Type) extends AndOrType: def isAnd: Boolean = false def isSoft: Boolean private var myBaseClassesPeriod: Period = Nowhere private var myBaseClasses: List[ClassSymbol] = _ /** Base classes of are the intersection of the operand base classes. */ - override final def baseClasses(using Context): List[ClassSymbol] = { - if (myBaseClassesPeriod != ctx.period) { + override final def baseClasses(using Context): List[ClassSymbol] = + if (myBaseClassesPeriod != ctx.period) val bcs1 = tp1.baseClasses val bcs1set = BaseClassSet(bcs1) - def recur(bcs2: List[ClassSymbol]): List[ClassSymbol] = bcs2 match { + def recur(bcs2: List[ClassSymbol]): List[ClassSymbol] = bcs2 match case bc2 :: bcs2rest => if (bcs1set contains bc2) if (bc2.is(Trait)) bc2 :: recur(bcs2rest) @@ -3422,12 +3197,9 @@ object Types { else recur(bcs2rest) case nil => bcs2 - } myBaseClasses = recur(tp2.baseClasses) myBaseClassesPeriod = ctx.period - } myBaseClasses - } private var myFactorCount = 0 override def orFactorCount(soft: Boolean) = @@ -3441,15 +3213,13 @@ object Types { private var myJoinPeriod: Period = Nowhere /** Replace or type by the closest non-or type above it */ - def join(using Context): Type = { - if (myJoinPeriod != ctx.period) { + def join(using Context): Type = + if (myJoinPeriod != ctx.period) myJoin = TypeOps.orDominator(this) core.println(i"join of $this == $myJoin") assert(myJoin != this) myJoinPeriod = ctx.period - } myJoin - } private var myUnion: Type = _ private var myUnionPeriod: Period = Nowhere @@ -3500,23 +3270,20 @@ object Types { override def computeHash(bs: Binders): Int = doHash(bs, if 
isSoft then 0 else 1, tp1, tp2) - override def eql(that: Type): Boolean = that match { + override def eql(that: Type): Boolean = that match case that: OrType => tp1.eq(that.tp1) && tp2.eq(that.tp2) && isSoft == that.isSoft case _ => false - } - } final class CachedOrType(tp1: Type, tp2: Type, override val isSoft: Boolean) extends OrType(tp1, tp2) - object OrType { + object OrType: - def apply(tp1: Type, tp2: Type, soft: Boolean)(using Context): OrType = { + def apply(tp1: Type, tp2: Type, soft: Boolean)(using Context): OrType = def where = i"in union $tp1 | $tp2" expectValueTypeOrWildcard(tp1, where) expectValueTypeOrWildcard(tp2, where) assertUnerased() unique(new CachedOrType(tp1, tp2, soft)) - } def balanced(tp1: Type, tp2: Type, soft: Boolean)(using Context): OrType = tp1 match @@ -3542,7 +3309,6 @@ object Types { /** Like `make`, but also supports higher-kinded types as argument */ def makeHk(tp1: Type, tp2: Type)(using Context): Type = TypeComparer.liftIfHK(tp1, tp2, OrType(_, _, soft = true), makeHk, _ & _) - } def expectValueTypeOrWildcard(tp: Type, where: => String)(using Context): Unit = if !tp.isValueTypeOrWildcard then @@ -3556,13 +3322,12 @@ object Types { * case OrNull(tp1) => // tp had the form `tp1 | Null` * case _ => // tp was not a nullable union */ - object OrNull { + object OrNull: def apply(tp: Type)(using Context) = if tp.isNullType then tp else OrType(tp, defn.NullType, soft = false) def unapply(tp: Type)(using Context): Option[Type] = val tp1 = tp.stripNull if tp1 ne tp then Some(tp1) else None - } // ----- ExprType and LambdaTypes ----------------------------------- @@ -3574,7 +3339,7 @@ object Types { /** A by-name parameter type of the form `=> T`, or the type of a method with no parameter list. 
*/ abstract case class ExprType(resType: Type) - extends CachedProxyType with MethodicType { + extends CachedProxyType with MethodicType: override def resultType(using Context): Type = resType override def underlying(using Context): Type = resType @@ -3586,27 +3351,22 @@ object Types { override def computeHash(bs: Binders): Int = doHash(bs, resType) override def hashIsStable: Boolean = resType.hashIsStable - override def eql(that: Type): Boolean = that match { + override def eql(that: Type): Boolean = that match case that: ExprType => resType.eq(that.resType) case _ => false - } // equals comes from case class; no matching override is needed - override def iso(that: Any, bs: BinderPairs): Boolean = that match { + override def iso(that: Any, bs: BinderPairs): Boolean = that match case that: ExprType => resType.equals(that.resType, bs) case _ => false - } - } final class CachedExprType(resultType: Type) extends ExprType(resultType) - object ExprType { - def apply(resultType: Type)(using Context): ExprType = { + object ExprType: + def apply(resultType: Type)(using Context): ExprType = assertUnerased() unique(new CachedExprType(resultType)) - } - } /** The lambda type square: * @@ -3637,7 +3397,7 @@ object Types { private var myParamRefs: List[ParamRefType] | Null = null - def paramRefs: List[ParamRefType] = { + def paramRefs: List[ParamRefType] = if myParamRefs == null then def recur(paramNames: List[ThisName], i: Int): List[ParamRefType] = paramNames match @@ -3645,7 +3405,6 @@ object Types { case _ => Nil myParamRefs = recur(paramNames, 0) myParamRefs.nn - } /** Like `paramInfos` but substitute parameter references with the given arguments */ final def instantiateParamInfos(argTypes: => List[Type])(using Context): List[Type] = @@ -3669,10 +3428,9 @@ object Types { * can be obtained using `TypeApplications#typeParams`. 
*/ def integrate(tparams: List[ParamInfo], tp: Type)(using Context): Type = - (tparams: @unchecked) match { + (tparams: @unchecked) match case LambdaParam(lam, _) :: _ => tp.subst(lam, this) // This is where the precondition is necessary. case params: List[Symbol @unchecked] => tp.subst(params, paramRefs) - } final def derivedLambdaType(paramNames: List[ThisName] = this.paramNames, paramInfos: List[PInfo] = this.paramInfos, @@ -3694,14 +3452,13 @@ object Types { override def toString: String = s"$prefixString($paramNames, $paramInfos, $resType)" } - abstract class HKLambda extends CachedProxyType with LambdaType { + abstract class HKLambda extends CachedProxyType with LambdaType: final override def underlying(using Context): Type = resType final override def hashIsStable: Boolean = resType.hashIsStable && paramInfos.hashIsStable final override def equals(that: Any): Boolean = equals(that, null) - } /** The superclass of MethodType and PolyType. */ - sealed abstract class MethodOrPoly extends UncachedGroundType with LambdaType with MethodicType { + sealed abstract class MethodOrPoly extends UncachedGroundType with LambdaType with MethodicType: // Invariants: // (1) mySignatureRunId != NoRunId => mySignature != null @@ -3778,18 +3535,15 @@ object Types { // No definition of `eql` --> fall back on equals, which is `eq` - final override def iso(that: Any, bs: BinderPairs): Boolean = that match { + final override def iso(that: Any, bs: BinderPairs): Boolean = that match case that: MethodOrPoly => paramNames.eqElements(that.paramNames) && - companion.eq(that.companion) && { + companion.eq(that.companion) `&&`: val bs1 = new SomeBinderPairs(this, that, bs) paramInfos.equalElements(that.paramInfos, bs1) && resType.equals(that.resType, bs1) - } case _ => false - } - } trait TermLambda extends LambdaType { thisLambdaType => import DepStatus._ @@ -3799,20 +3553,16 @@ object Types { type ParamRefType = TermParamRef override def resultType(using Context): Type = - if 
(dependencyStatus == FalseDeps) { // dealias all false dependencies - val dealiasMap = new TypeMap with IdentityCaptRefMap { - def apply(tp: Type) = tp match { + if (dependencyStatus == FalseDeps) // dealias all false dependencies + val dealiasMap = new TypeMap with IdentityCaptRefMap: + def apply(tp: Type) = tp match case tp @ TypeRef(pre, _) => - tp.info match { + tp.info match case TypeAlias(alias) if depStatus(NoDeps, pre) == TrueDeps => apply(alias) case _ => mapOver(tp) - } case _ => mapOver(tp) - } - } dealiasMap(resType) - } else resType private var myDependencyStatus: DependencyStatus = Unknown @@ -3834,12 +3584,11 @@ object Types { else tp match case tp: TypeRef => val status1 = applyPrefix(tp) - tp.info match { // follow type alias to avoid dependency + tp.info match // follow type alias to avoid dependency case TypeAlias(alias) if status1 == TrueDeps => combine(compute(status, alias, theAcc), FalseDeps) case _ => status1 - } case tp: TermRef => applyPrefix(tp) case tp: AppliedType => tp.fold(status, compute(_, _, theAcc)) case tp: TypeVar if !tp.isInstantiated => combine(status, Provisional) @@ -3847,11 +3596,10 @@ object Types { case tp: AnnotatedType => tp match case CapturingType(parent, refs) => - (compute(status, parent, theAcc) /: refs.elems) { + (compute(status, parent, theAcc) /: refs.elems): (s, ref) => ref match case tp: TermParamRef if tp.binder eq thisLambdaType => combine(s, CaptureDeps) case _ => s - } case _ => if tp.annot.refersToParamOf(thisLambdaType) then TrueDeps else compute(status, tp.parent, theAcc) @@ -3874,24 +3622,22 @@ object Types { */ private def dependencyStatus(using Context): DependencyStatus = if (myDependencyStatus != Unknown) myDependencyStatus - else { + else val result = depStatus(NoDeps, resType) if ((result & Provisional) == 0) myDependencyStatus = result (result & StatusMask).toByte - } /** The parameter dependency status of this method. 
Analogous to `dependencyStatus`, * but tracking dependencies in same parameter list. */ private def paramDependencyStatus(using Context): DependencyStatus = if (myParamDependencyStatus != Unknown) myParamDependencyStatus - else { + else val result = if (paramInfos.isEmpty) NoDeps else paramInfos.tail.foldLeft(NoDeps)(depStatus(_, _)) if ((result & Provisional) == 0) myParamDependencyStatus = result (result & StatusMask).toByte - } /** Does result type contain references to parameters of this method type, * which cannot be eliminated by de-aliasing? @@ -3915,8 +3661,8 @@ object Types { /** The least supertype of `resultType` that does not contain parameter dependencies */ def nonDependentResultApprox(using Context): Type = if isResultDependent then - val dropDependencies = new ApproximatingTypeMap with IdempotentCaptRefMap { - def apply(tp: Type) = tp match { + val dropDependencies = new ApproximatingTypeMap with IdempotentCaptRefMap: + def apply(tp: Type) = tp match case tp @ TermParamRef(`thisLambdaType`, _) => range(defn.NothingType, atVariance(1)(apply(tp.underlying))) case CapturingType(_, _) => @@ -3930,8 +3676,6 @@ object Types { else parent1 case _ => mapOver(tp) - } - } dropDependencies(resultType) else resultType } @@ -3965,13 +3709,12 @@ object Types { final class CachedMethodType(paramNames: List[TermName])(paramInfosExp: MethodType => List[Type], resultTypeExp: MethodType => Type, val companion: MethodTypeCompanion) extends MethodType(paramNames)(paramInfosExp, resultTypeExp) - abstract class LambdaTypeCompanion[N <: Name, PInfo <: Type, LT <: LambdaType] { + abstract class LambdaTypeCompanion[N <: Name, PInfo <: Type, LT <: LambdaType]: def syntheticParamName(n: Int): N @sharable private val memoizedNames = util.HashMap[Int, List[N]]() - def syntheticParamNames(n: Int): List[N] = synchronized { + def syntheticParamNames(n: Int): List[N] = synchronized: memoizedNames.getOrElseUpdate(n, (0 until n).map(syntheticParamName).toList) - } def 
apply(paramNames: List[N])(paramInfosExp: LT => List[PInfo], resultTypeExp: LT => Type)(using Context): LT def apply(paramNames: List[N], paramInfos: List[PInfo], resultType: Type)(using Context): LT = @@ -3993,22 +3736,18 @@ object Types { else apply(params.map(_.paramName))( tl => params.map(param => toPInfo(addSealed(param, tl.integrate(params, param.paramInfo)))), tl => tl.integrate(params, resultType)) - } abstract class TermLambdaCompanion[LT <: TermLambda] - extends LambdaTypeCompanion[TermName, Type, LT] { + extends LambdaTypeCompanion[TermName, Type, LT]: def toPInfo(tp: Type)(using Context): Type = tp def syntheticParamName(n: Int): TermName = nme.syntheticParamName(n) - } abstract class TypeLambdaCompanion[LT <: TypeLambda] - extends LambdaTypeCompanion[TypeName, TypeBounds, LT] { - def toPInfo(tp: Type)(using Context): TypeBounds = (tp: @unchecked) match { + extends LambdaTypeCompanion[TypeName, TypeBounds, LT]: + def toPInfo(tp: Type)(using Context): TypeBounds = (tp: @unchecked) match case tp: TypeBounds => tp case tp: ErrorType => TypeAlias(tp) - } def syntheticParamName(n: Int): TypeName = tpnme.syntheticTypeParamName(n) - } abstract class MethodTypeCompanion(val prefixString: String) extends TermLambdaCompanion[MethodType] { self => @@ -4062,34 +3801,30 @@ object Types { def apply(paramNames: List[TermName])(paramInfosExp: MethodType => List[Type], resultTypeExp: MethodType => Type)(using Context): MethodType = checkValid(unique(new CachedMethodType(paramNames)(paramInfosExp, resultTypeExp, self))) - def checkValid(mt: MethodType)(using Context): mt.type = { + def checkValid(mt: MethodType)(using Context): mt.type = if (Config.checkMethodTypes) for ((paramInfo, idx) <- mt.paramInfos.zipWithIndex) - paramInfo.foreachPart { + paramInfo.foreachPart: case TermParamRef(`mt`, j) => assert(j < idx, mt) case _ => - } mt - } } - object MethodType extends MethodTypeCompanion("MethodType") { + object MethodType extends MethodTypeCompanion("MethodType"): def 
companion(isContextual: Boolean = false, isImplicit: Boolean = false): MethodTypeCompanion = if (isContextual) ContextualMethodType else if (isImplicit) ImplicitMethodType else MethodType - } object ContextualMethodType extends MethodTypeCompanion("ContextualMethodType") object ImplicitMethodType extends MethodTypeCompanion("ImplicitMethodType") /** A ternary extractor for MethodType */ - object MethodTpe { + object MethodTpe: def unapply(mt: MethodType)(using Context): Some[(List[TermName], List[Type], Type)] = Some((mt.paramNames, mt.paramInfos, mt.resultType)) - } - trait TypeLambda extends LambdaType { + trait TypeLambda extends LambdaType: type ThisName = TypeName type PInfo = TypeBounds type This <: TypeLambda @@ -4104,7 +3839,7 @@ object Types { paramNames.indices.toList.map(new LambdaParam(this, _)) def derivedLambdaAbstraction(paramNames: List[TypeName], paramInfos: List[TypeBounds], resType: Type)(using Context): Type = - resType match { + resType match case resType: AliasingBounds => resType.derivedAlias(newLikeThis(paramNames, paramInfos, resType.alias)) case resType @ TypeBounds(lo, hi) => @@ -4113,8 +3848,6 @@ object Types { newLikeThis(paramNames, paramInfos, hi)) case _ => derivedLambdaType(paramNames, paramInfos, resType) - } - } /** A type lambda of the form `[X_0 B_0, ..., X_n B_n] => T` * @@ -4132,7 +3865,7 @@ object Types { */ class HKTypeLambda(val paramNames: List[TypeName], @constructorOnly variances: List[Variance])( paramInfosExp: HKTypeLambda => List[TypeBounds], resultTypeExp: HKTypeLambda => Type) - extends HKLambda with TypeLambda { + extends HKLambda with TypeLambda: type This = HKTypeLambda def companion: HKTypeLambda.type = HKTypeLambda @@ -4156,7 +3889,7 @@ object Types { // No definition of `eql` --> fall back on equals, which calls iso - final override def iso(that: Any, bs: BinderPairs): Boolean = that match { + final override def iso(that: Any, bs: BinderPairs): Boolean = that match case that: HKTypeLambda => 
paramNames.eqElements(that.paramNames) && isDeclaredVarianceLambda == that.isDeclaredVarianceLambda @@ -4172,7 +3905,6 @@ object Types { } case _ => false - } override def newLikeThis(paramNames: List[ThisName], paramInfos: List[PInfo], resType: Type)(using Context): This = newLikeThis(paramNames, declaredVariances, paramInfos, resType) @@ -4193,14 +3925,13 @@ object Types { assert(resType.isInstanceOf[TermType], this) assert(paramNames.nonEmpty) - } /** The type of a polymorphic method. It has the same form as HKTypeLambda, * except it applies to terms and parameters do not have variances. */ class PolyType(val paramNames: List[TypeName])( paramInfosExp: PolyType => List[TypeBounds], resultTypeExp: PolyType => Type) - extends MethodOrPoly with TypeLambda { + extends MethodOrPoly with TypeLambda: type This = PolyType def companion: PolyType.type = PolyType @@ -4217,25 +3948,21 @@ object Types { /** Merge nested polytypes into one polytype. nested polytypes are normally not supported * but can arise as temporary data structures. 
*/ - def flatten(using Context): PolyType = resType match { + def flatten(using Context): PolyType = resType match case that: PolyType => - val shiftedSubst = (x: PolyType) => new TypeMap { - def apply(t: Type) = t match { + val shiftedSubst = (x: PolyType) => new TypeMap: + def apply(t: Type) = t match case TypeParamRef(`that`, n) => x.paramRefs(n + paramNames.length) case t => mapOver(t) - } - } PolyType(paramNames ++ that.paramNames)( x => this.paramInfos.mapConserve(_.subst(this, x).bounds) ++ that.paramInfos.mapConserve(shiftedSubst(x)(_).bounds), x => shiftedSubst(x)(that.resultType).subst(this, x)) case _ => this - } protected def prefixString: String = "PolyType" - } - object HKTypeLambda extends TypeLambdaCompanion[HKTypeLambda] { + object HKTypeLambda extends TypeLambdaCompanion[HKTypeLambda]: def apply(paramNames: List[TypeName])( paramInfosExp: HKTypeLambda => List[TypeBounds], resultTypeExp: HKTypeLambda => Type)(using Context): HKTypeLambda = @@ -4282,7 +4009,7 @@ object Types { * type T[A, B] = A => B // A is contravariant, B is covariant (determined structurally) * type T[A, +B] = A => B // A is invariant, B is covariant */ - def boundsFromParams[PI <: ParamInfo.Of[TypeName]](params: List[PI], bounds: TypeBounds)(using Context): TypeBounds = { + def boundsFromParams[PI <: ParamInfo.Of[TypeName]](params: List[PI], bounds: TypeBounds)(using Context): TypeBounds = def expand(tp: Type, useVariances: Boolean) = if params.nonEmpty && useVariances then apply(params.map(_.paramName), params.map(_.paramVariance))( @@ -4293,7 +4020,7 @@ object Types { def isOpaqueAlias = params match case (param: Symbol) :: _ => param.owner.is(Opaque) case _ => false - bounds match { + bounds match case bounds: MatchAlias => bounds.derivedAlias(expand(bounds.alias, true)) case bounds: TypeAlias => @@ -4303,11 +4030,8 @@ object Types { bounds.derivedTypeBounds( if lo.isRef(defn.NothingClass) then lo else expand(lo, true), expand(hi, true)) - } - } - } - object PolyType extends 
TypeLambdaCompanion[PolyType] { + object PolyType extends TypeLambdaCompanion[PolyType]: def apply(paramNames: List[TypeName])( paramInfosExp: PolyType => List[TypeBounds], resultTypeExp: PolyType => Type)(using Context): PolyType = @@ -4325,9 +4049,8 @@ object Types { def unapply(tl: PolyType): Some[(List[LambdaParam], Type)] = Some((tl.typeParams, tl.resType)) - } - private object DepStatus { + private object DepStatus: type DependencyStatus = Byte final val Unknown: DependencyStatus = 0 // not yet computed final val NoDeps: DependencyStatus = 1 // no dependent parameters found @@ -4336,12 +4059,11 @@ object Types { final val TrueDeps: DependencyStatus = 4 // some truly dependent parameters exist final val StatusMask: DependencyStatus = 7 // the bits indicating actual dependency status final val Provisional: DependencyStatus = 8 // set if dependency status can still change due to type variable instantiations - } // ----- Type application: LambdaParam, AppliedType --------------------- /** The parameter of a type lambda */ - case class LambdaParam(tl: TypeLambda, n: Int) extends ParamInfo, printing.Showable { + case class LambdaParam(tl: TypeLambda, n: Int) extends ParamInfo, printing.Showable: type ThisName = TypeName def isTypeParam(using Context): Boolean = tl.paramNames.head.isTypeName @@ -4388,11 +4110,10 @@ object Types { myVariance def toText(printer: Printer): Text = printer.toText(this) - } /** A type application `C[T_1, ..., T_n]` */ abstract case class AppliedType(tycon: Type, args: List[Type]) - extends CachedProxyType with ValueType { + extends CachedProxyType with ValueType: private var validSuper: Period = Nowhere private var cachedSuper: Type = uninitialized @@ -4444,12 +4165,11 @@ object Types { case _ => defn.AnyType cachedSuper - override def translucentSuperType(using Context): Type = tycon match { + override def translucentSuperType(using Context): Type = tycon match case tycon: TypeRef if tycon.symbol.isOpaqueAlias => 
tycon.translucentSuperType.applyIfParameterized(args) case _ => tryNormalize.orElse(superType) - } inline def map(inline op: Type => Type)(using Context) = def mapArgs(args: List[Type]): List[Type] = args match @@ -4463,22 +4183,18 @@ object Types { case nil => x foldArgs(op(x, tycon), args) - override def tryNormalize(using Context): Type = tycon.stripTypeVar match { + override def tryNormalize(using Context): Type = tycon.stripTypeVar match case tycon: TypeRef => - def tryMatchAlias = tycon.info match { + def tryMatchAlias = tycon.info match case MatchAlias(alias) => - trace(i"normalize $this", typr, show = true) { - MatchTypeTrace.recurseWith(this) { + trace(i"normalize $this", typr, show = true): + MatchTypeTrace.recurseWith(this): alias.applyIfParameterized(args.map(_.normalized)).tryNormalize - } - } case _ => NoType - } tryCompiletimeConstantFold.orElse(tryMatchAlias) case _ => NoType - } /** Does this application expand to a match type? */ def isMatchAlias(using Context): Boolean = tycon.stripTypeVar match @@ -4507,24 +4223,21 @@ object Types { myEvalued = res res - def lowerBound(using Context): Type = tycon.stripTypeVar match { + def lowerBound(using Context): Type = tycon.stripTypeVar match case tycon: TypeRef => - tycon.info match { + tycon.info match case TypeBounds(lo, hi) => if (lo eq hi) superType // optimization, can profit from caching in this case else lo.applyIfParameterized(args) case _ => NoType - } case tycon: AppliedType => tycon.lowerBound.applyIfParameterized(args) case _ => NoType - } - def tyconTypeParams(using Context): List[ParamInfo] = { + def tyconTypeParams(using Context): List[ParamInfo] = val tparams = tycon.typeParams if (tparams.isEmpty) HKTypeLambda.any(args.length).typeParams else tparams - } def hasWildcardArg(using Context): Boolean = args.exists(isBounds) @@ -4534,87 +4247,75 @@ object Types { override def computeHash(bs: Binders): Int = doHash(bs, tycon, args) - override def hashIsStable: Boolean = { + override def 
hashIsStable: Boolean = if (myStableHash == 0) myStableHash = if (tycon.hashIsStable && args.hashIsStable) 1 else -1 myStableHash > 0 - } override def eql(that: Type): Boolean = this `eq` that // safe because applied types are hash-consed separately // equals comes from case class; no matching override is needed - final override def iso(that: Any, bs: BinderPairs): Boolean = that match { + final override def iso(that: Any, bs: BinderPairs): Boolean = that match case that: AppliedType => tycon.equals(that.tycon, bs) && args.equalElements(that.args, bs) case _ => false - } - } - final class CachedAppliedType(tycon: Type, args: List[Type], hc: Int) extends AppliedType(tycon, args) { + final class CachedAppliedType(tycon: Type, args: List[Type], hc: Int) extends AppliedType(tycon, args): myHash = hc - } - object AppliedType { - def apply(tycon: Type, args: List[Type])(using Context): AppliedType = { + object AppliedType: + def apply(tycon: Type, args: List[Type])(using Context): AppliedType = assertUnerased() ctx.base.uniqueAppliedTypes.enterIfNew(tycon, args) - } - } // ----- BoundTypes: ParamRef, RecThis ---------------------------------------- - abstract class BoundType extends CachedProxyType with ValueType { + abstract class BoundType extends CachedProxyType with ValueType: type BT <: Type val binder: BT def copyBoundType(bt: BT): Type override def hashIsStable: Boolean = false - } - abstract class ParamRef extends BoundType { + abstract class ParamRef extends BoundType: type BT <: LambdaType def paramNum: Int def paramName: binder.ThisName = binder.paramNames(paramNum) - override def underlying(using Context): Type = { + override def underlying(using Context): Type = // TODO: update paramInfos's type to nullable val infos: List[Type] | Null = binder.paramInfos if (infos == null) NoType // this can happen if the referenced generic type is not initialized yet else infos(paramNum) - } override def computeHash(bs: Binders): Int = doHash(paramNum, 
binder.identityHash(bs)) override def equals(that: Any): Boolean = equals(that, null) - override def iso(that: Any, bs: BinderPairs): Boolean = that match { + override def iso(that: Any, bs: BinderPairs): Boolean = that match case that: ParamRef => paramNum == that.paramNum && binder.equalBinder(that.binder, bs) case _ => false - } protected def kindString: String override def toString: String = try s"${kindString}ParamRef($paramName)" - catch { + catch case ex: IndexOutOfBoundsException => s"ParamRef()" - } - } /** Only created in `binder.paramRefs`. Use `binder.paramRefs(paramNum)` to * refer to `TermParamRef(binder, paramNum)`. */ - abstract case class TermParamRef(binder: TermLambda, paramNum: Int) extends ParamRef, CaptureRef { + abstract case class TermParamRef(binder: TermLambda, paramNum: Int) extends ParamRef, CaptureRef: type BT = TermLambda def canBeTracked(using Context) = true def kindString: String = "Term" def copyBoundType(bt: BT): Type = bt.paramRefs(paramNum) - } private final class TermParamRefImpl(binder: TermLambda, paramNum: Int) extends TermParamRef(binder, paramNum) /** Only created in `binder.paramRefs`. Use `binder.paramRefs(paramNum)` to * refer to `TypeParamRef(binder, paramNum)`. 
*/ - abstract case class TypeParamRef(binder: TypeLambda, paramNum: Int) extends ParamRef { + abstract case class TypeParamRef(binder: TypeLambda, paramNum: Int) extends ParamRef: type BT = TypeLambda def kindString: String = "Type" def copyBoundType(bt: BT): Type = bt.paramRefs(paramNum) @@ -4628,20 +4329,18 @@ object Types { * - fromBelow and param <:< bound * - !fromBelow and param >:> bound */ - def occursIn(bound: Type, fromBelow: Boolean)(using Context): Boolean = bound.stripTypeVar match { + def occursIn(bound: Type, fromBelow: Boolean)(using Context): Boolean = bound.stripTypeVar match case bound: ParamRef => bound == this case bound: AndType => occursIn(bound.tp1, fromBelow) && occursIn(bound.tp2, fromBelow) case bound: OrType => occursIn(bound.tp1, fromBelow) || occursIn(bound.tp2, fromBelow) case _ => false - } - } private final class TypeParamRefImpl(binder: TypeLambda, paramNum: Int) extends TypeParamRef(binder, paramNum) /** a self-reference to an enclosing recursive type. The only creation method is * `binder.recThis`, returning `RecThis(binder)`. 
*/ - abstract case class RecThis(binder: RecType) extends BoundType with SingletonType { + abstract case class RecThis(binder: RecType) extends BoundType with SingletonType: type BT = RecType override def underlying(using Context): RecType = binder def copyBoundType(bt: BT): RecThis = bt.recThis @@ -4652,17 +4351,14 @@ object Types { override def equals(that: Any): Boolean = equals(that, null) - override def iso(that: Any, bs: BinderPairs): Boolean = that match { + override def iso(that: Any, bs: BinderPairs): Boolean = that match case that: RecThis => binder.equalBinder(that.binder, bs) case _ => false - } override def toString: String = try s"RecThis(${binder.hashCode})" - catch { + catch case ex: NullPointerException => s"RecThis()" - } - } private final class RecThisImpl(binder: RecType) extends RecThis(binder) @@ -4676,7 +4372,7 @@ object Types { * Note that care is needed when creating them, since not all types need to be inhabited. * A skolem is equal to itself and no other type. */ - case class SkolemType(info: Type) extends CachedProxyType with ValueType with SingletonType { + case class SkolemType(info: Type) extends CachedProxyType with ValueType with SingletonType: override def underlying(using Context): Type = info def derivedSkolemType(info: Type)(using Context): SkolemType = if (info eq this.info) this else SkolemType(info) @@ -4691,13 +4387,11 @@ object Types { //assert(id != 10) private var myRepr: Name | Null = null - def repr(using Context): Name = { + def repr(using Context): Name = if (myRepr == null) myRepr = SkolemName.fresh() myRepr.nn - } override def toString: String = s"SkolemType($hashCode)" - } /** A skolem type used to wrap the type of the qualifier of a selection. * @@ -4706,13 +4400,11 @@ object Types { * [[TypeOps#asSeenFrom]] may treat it specially for optimization purposes, * see its implementation for more details. 
*/ - class QualSkolemType(info: Type) extends SkolemType(info) { + class QualSkolemType(info: Type) extends SkolemType(info): override def derivedSkolemType(info: Type)(using Context): SkolemType = if (info eq this.info) this else QualSkolemType(info) - } - object QualSkolemType { + object QualSkolemType: def apply(info: Type): QualSkolemType = new QualSkolemType(info) - } // ------------ Type variables ---------------------------------------- @@ -4731,7 +4423,7 @@ object Types { * @param creatorState the typer state in which the variable was created. * @param initNestingLevel the initial nesting level of the type variable. (c.f. nestingLevel) */ - final class TypeVar private(initOrigin: TypeParamRef, creatorState: TyperState | Null, val initNestingLevel: Int) extends CachedProxyType with ValueType { + final class TypeVar private(initOrigin: TypeParamRef, creatorState: TyperState | Null, val initNestingLevel: Int) extends CachedProxyType with ValueType: private var currentOrigin = initOrigin def origin: TypeParamRef = currentOrigin @@ -4794,7 +4486,7 @@ object Types { def isInstantiated(using Context): Boolean = instanceOpt.exists /** Instantiate variable with given type */ - def instantiateWith(tp: Type)(using Context): Type = { + def instantiateWith(tp: Type)(using Context): Type = assert(tp ne this, i"self instantiation of $origin, constraint = ${ctx.typerState.constraint}") assert(!myInst.exists, i"$origin is already instantiated to $myInst but we attempted to instantiate it to $tp") typr.println(i"instantiating $this with $tp") @@ -4807,7 +4499,6 @@ object Types { setInst(tp) ctx.typerState.constraint = ctx.typerState.constraint.replace(origin, tp) tp - } /** Instantiate variable from the constraints over its `origin`. 
* If `fromBelow` is true, the variable is instantiated to the lub @@ -4840,27 +4531,23 @@ object Types { /** Unwrap to instance (if instantiated) or origin (if not), until result * is no longer a TypeVar */ - override def stripTypeVar(using Context): Type = { + override def stripTypeVar(using Context): Type = val inst = instanceOpt if (inst.exists) inst.stripTypeVar else origin - } override def stripped(using Context): Type = stripTypeVar.stripped /** If the variable is instantiated, its instance, otherwise its origin */ - override def underlying(using Context): Type = { + override def underlying(using Context): Type = val inst = instanceOpt if (inst.exists) inst else origin - } override def computeHash(bs: Binders): Int = identityHash(bs) override def equals(that: Any): Boolean = this.eq(that.asInstanceOf[AnyRef]) - override def toString: String = { + override def toString: String = def instStr = if (inst.exists) s" -> $inst" else "" s"TypeVar($origin$instStr)" - } - } object TypeVar: def apply(using Context)(initOrigin: TypeParamRef, creatorState: TyperState | Null, nestingLevel: Int = ctx.nestingLevel) = new TypeVar(initOrigin, creatorState, nestingLevel) @@ -4877,15 +4564,14 @@ object Types { * * and `X_1,...X_n` are the type variables bound in `patternType` */ - abstract case class MatchType(bound: Type, scrutinee: Type, cases: List[Type]) extends CachedProxyType with ValueType { + abstract case class MatchType(bound: Type, scrutinee: Type, cases: List[Type]) extends CachedProxyType with ValueType: def derivedMatchType(bound: Type, scrutinee: Type, cases: List[Type])(using Context): MatchType = if (bound.eq(this.bound) && scrutinee.eq(this.scrutinee) && cases.eqElements(this.cases)) this else MatchType(bound, scrutinee, cases) - def caseType(tp: Type)(using Context): Type = tp match { + def caseType(tp: Type)(using Context): Type = tp match case tp: HKTypeLambda => caseType(tp.resType) case defn.MatchCase(_, body) => body - } def alternatives(using Context): 
List[Type] = cases.map(caseType) def underlying(using Context): Type = bound @@ -4900,9 +4586,9 @@ object Types { case ex: Throwable => handleRecursive("normalizing", s"${scrutinee.show} match ..." , ex) - def reduced(using Context): Type = { + def reduced(using Context): Type = - def contextInfo(tp: Type): Type = tp match { + def contextInfo(tp: Type): Type = tp match case tp: TypeParamRef => val constraint = ctx.typerState.constraint if (constraint.entry(tp).exists) TypeComparer.fullBounds(tp) @@ -4912,7 +4598,6 @@ object Types { if (bounds == null) NoType else bounds case tp: TypeVar => tp.underlying - } def updateReductionContext(footprint: collection.Set[Type]): Unit = reductionContext = util.HashMap() @@ -4934,7 +4619,7 @@ object Types { record("MatchType.reduce computed") if (myReduced != null) record("MatchType.reduce cache miss") myReduced = - trace(i"reduce match type $this $hashCode", matchTypes, show = true) { + trace(i"reduce match type $this $hashCode", matchTypes, show = true): def matchCases(cmp: TrackingTypeComparer): Type = val saved = ctx.typerState.snapshot() try cmp.matchCases(scrutinee.normalized, cases) @@ -4947,29 +4632,24 @@ object Types { // instantiations during matchtype reduction TypeComparer.tracked(matchCases) - } myReduced.nn - } /** True if the reduction uses GADT constraints. 
*/ def reducesUsingGadt(using Context): Boolean = - (reductionContext ne null) && reductionContext.keysIterator.exists { + (reductionContext ne null) && reductionContext.keysIterator.exists: case tp: TypeRef => reductionContext(tp).exists case _ => false - } override def computeHash(bs: Binders): Int = doHash(bs, scrutinee, bound :: cases) - override def eql(that: Type): Boolean = that match { + override def eql(that: Type): Boolean = that match case that: MatchType => bound.eq(that.bound) && scrutinee.eq(that.scrutinee) && cases.eqElements(that.cases) case _ => false - } - } class CachedMatchType(bound: Type, scrutinee: Type, cases: List[Type]) extends MatchType(bound, scrutinee, cases) - object MatchType { + object MatchType: def apply(bound: Type, scrutinee: Type, cases: List[Type])(using Context): MatchType = unique(new CachedMatchType(bound, scrutinee, cases)) @@ -4982,12 +4662,11 @@ object Types { object InDisguise: def unapply(tp: AppliedType)(using Context): Option[MatchType] = tp match case AppliedType(tycon: TypeRef, args) => tycon.info match - case MatchAlias(alias) => alias.applyIfParameterized(args) match - case mt: MatchType => Some(mt) + case MatchAlias(alias) => alias.applyIfParameterized(args) match + case mt: MatchType => Some(mt) + case _ => None case _ => None - case _ => None case _ => None - } // ------ ClassInfo, Type Bounds -------------------------------------------------- @@ -5010,7 +4689,7 @@ object Types { cls: ClassSymbol, declaredParents: List[Type], decls: Scope, - selfInfo: TypeOrSymbol) extends CachedGroundType with TypeType { + selfInfo: TypeOrSymbol) extends CachedGroundType with TypeType: private var selfTypeCache: Type | Null = null private var appliedRefCache: Type | Null = null @@ -5019,10 +4698,10 @@ object Types { * - the explicit self type if given (or the info of a given self symbol), and * - the fully applied reference to the class itself. 
*/ - def selfType(using Context): Type = { + def selfType(using Context): Type = val clsd = cls.classDenot if (selfTypeCache == null) - selfTypeCache = { + selfTypeCache = val givenSelf = clsd.givenSelfType if (!givenSelf.isValueType) appliedRef else if (clsd.is(Module)) givenSelf @@ -5032,25 +4711,21 @@ object Types { givenSelf1.derivedAnnotatedType(tp & appliedRef, givenSelf1.annot) case _ => AndType(givenSelf, appliedRef) - } selfTypeCache.nn - } - def appliedRef(using Context): Type = { + def appliedRef(using Context): Type = if (appliedRefCache == null) appliedRefCache = TypeRef(prefix, cls).appliedTo(cls.classDenot.typeParams.map(_.typeRef)) appliedRefCache.nn - } // cached because baseType needs parents private var parentsCache: List[Type] | Null = null - override def parents(using Context): List[Type] = { + override def parents(using Context): List[Type] = if (parentsCache == null) parentsCache = declaredParents.mapConserve(_.asSeenFrom(prefix, cls.owner)) parentsCache.nn - } protected def newLikeThis(prefix: Type, declaredParents: List[Type], decls: Scope, selfInfo: TypeOrSymbol)(using Context): ClassInfo = ClassInfo(prefix, cls, declaredParents, decls, selfInfo) @@ -5099,7 +4774,7 @@ object Types { override def computeHash(bs: Binders | Null): Int = doHash(bs, cls, prefix) override def hashIsStable: Boolean = prefix.hashIsStable && declaredParents.hashIsStable - override def eql(that: Type): Boolean = that match { + override def eql(that: Type): Boolean = that match case that: ClassInfo => prefix.eq(that.prefix) && cls.eq(that.cls) && @@ -5107,11 +4782,10 @@ object Types { decls.eq(that.decls) && selfInfo.eq(that.selfInfo) case _ => false - } override def equals(that: Any): Boolean = equals(that, null) - override def iso(that: Any, bs: BinderPairs): Boolean = that match { + override def iso(that: Any, bs: BinderPairs): Boolean = that match case that: ClassInfo => prefix.equals(that.prefix, bs) && cls.eq(that.cls) && @@ -5119,17 +4793,15 @@ object Types { 
decls.eq(that.decls) && selfInfo.eq(that.selfInfo) case _ => false - } override def toString: String = s"ClassInfo($prefix, $cls, $declaredParents)" - } class CachedClassInfo(prefix: Type, cls: ClassSymbol, declaredParents: List[Type], decls: Scope, selfInfo: TypeOrSymbol) extends ClassInfo(prefix, cls, declaredParents, decls, selfInfo) /** A class for temporary class infos where `parents` are not yet known */ final class TempClassInfo(prefix: Type, cls: ClassSymbol, decls: Scope, selfInfo: TypeOrSymbol) - extends CachedClassInfo(prefix, cls, Nil, decls, selfInfo) { + extends CachedClassInfo(prefix, cls, Nil, decls, selfInfo): /** Convert to classinfo with known parents */ def finalized(parents: List[Type])(using Context): ClassInfo = @@ -5139,15 +4811,13 @@ object Types { TempClassInfo(prefix, cls, decls, selfInfo) override def toString: String = s"TempClassInfo($prefix, $cls)" - } - object ClassInfo { + object ClassInfo: def apply(prefix: Type, cls: ClassSymbol, declaredParents: List[Type], decls: Scope, selfInfo: TypeOrSymbol = NoType)(using Context): ClassInfo = unique(new CachedClassInfo(prefix, cls, declaredParents, decls, selfInfo)) - } /** Type bounds >: lo <: hi */ - abstract case class TypeBounds(lo: Type, hi: Type) extends CachedProxyType with TypeType { + abstract case class TypeBounds(lo: Type, hi: Type) extends CachedProxyType with TypeType: assert(lo.isInstanceOf[TermType], lo) assert(hi.isInstanceOf[TermType], hi) @@ -5159,7 +4829,7 @@ object Types { if ((lo eq this.lo) && (hi eq this.hi)) this else TypeBounds(lo, hi) - def contains(tp: Type)(using Context): Boolean = tp match { + def contains(tp: Type)(using Context): Boolean = tp match case tp: TypeBounds => lo <:< tp.lo && tp.hi <:< hi case tp: ClassInfo => val cls = tp.cls @@ -5170,7 +4840,6 @@ object Types { .withDenot(new UniqueRefDenotation(cls, tp, cls.validFor, tp.prefix))) case _ => lo <:< tp && tp <:< hi - } def & (that: TypeBounds)(using Context): TypeBounds = // This will try to 
preserve the FromJavaObjects type in upper bounds. @@ -5197,63 +4866,54 @@ object Types { else if ((that.lo frozen_<:< this.lo) && (this.hi frozen_<:< that.hi)) that else TypeBounds(this.lo & that.lo, this.hi | that.hi) - override def & (that: Type)(using Context): Type = that match { + override def & (that: Type)(using Context): Type = that match case that: TypeBounds => this & that case _ => super.& (that) - } - override def | (that: Type)(using Context): Type = that match { + override def | (that: Type)(using Context): Type = that match case that: TypeBounds => this | that case _ => super.| (that) - } override def computeHash(bs: Binders): Int = doHash(bs, lo, hi) override def hashIsStable: Boolean = lo.hashIsStable && hi.hashIsStable override def equals(that: Any): Boolean = equals(that, null) - override def iso(that: Any, bs: BinderPairs): Boolean = that match { + override def iso(that: Any, bs: BinderPairs): Boolean = that match case that: AliasingBounds => false case that: TypeBounds => lo.equals(that.lo, bs) && hi.equals(that.hi, bs) case _ => false - } - override def eql(that: Type): Boolean = that match { + override def eql(that: Type): Boolean = that match case that: AliasingBounds => false case that: TypeBounds => lo.eq(that.lo) && hi.eq(that.hi) case _ => false - } - } class RealTypeBounds(lo: Type, hi: Type) extends TypeBounds(lo, hi) /** Common supertype of `TypeAlias` and `MatchAlias` */ - abstract class AliasingBounds(val alias: Type) extends TypeBounds(alias, alias) { + abstract class AliasingBounds(val alias: Type) extends TypeBounds(alias, alias): def derivedAlias(alias: Type)(using Context): AliasingBounds override def computeHash(bs: Binders): Int = doHash(bs, alias) override def hashIsStable: Boolean = alias.hashIsStable - override def iso(that: Any, bs: BinderPairs): Boolean = that match { + override def iso(that: Any, bs: BinderPairs): Boolean = that match case that: AliasingBounds => this.isTypeAlias == that.isTypeAlias && 
alias.equals(that.alias, bs) case _ => false - } // equals comes from case class; no matching override is needed - override def eql(that: Type): Boolean = that match { + override def eql(that: Type): Boolean = that match case that: AliasingBounds => this.isTypeAlias == that.isTypeAlias && alias.eq(that.alias) case _ => false - } - } /** = T */ - class TypeAlias(alias: Type) extends AliasingBounds(alias) { + class TypeAlias(alias: Type) extends AliasingBounds(alias): def derivedAlias(alias: Type)(using Context): AliasingBounds = if (alias eq this.alias) this else TypeAlias(alias) - } /** = T where `T` is a `MatchType` * @@ -5262,12 +4922,11 @@ object Types { * If we assumed full substitutivity, we would have to reject all recursive match * aliases (or else take the jump and allow full recursive types). */ - class MatchAlias(alias: Type) extends AliasingBounds(alias) { + class MatchAlias(alias: Type) extends AliasingBounds(alias): def derivedAlias(alias: Type)(using Context): AliasingBounds = if (alias eq this.alias) this else MatchAlias(alias) - } - object TypeBounds { + object TypeBounds: def apply(lo: Type, hi: Type)(using Context): TypeBounds = unique(new RealTypeBounds(lo, hi)) def empty(using Context): TypeBounds = @@ -5284,22 +4943,19 @@ object Types { if top.isExactlyAny then empty else apply(defn.NothingType, top) def upper(hi: Type)(using Context): TypeBounds = apply(defn.NothingType, hi) def lower(lo: Type)(using Context): TypeBounds = apply(lo, defn.AnyType) - } - object TypeAlias { + object TypeAlias: def apply(alias: Type)(using Context): TypeAlias = unique(new TypeAlias(alias)) def unapply(tp: TypeAlias): Option[Type] = Some(tp.alias) - } - object MatchAlias { + object MatchAlias: def apply(alias: Type)(using Context): MatchAlias = unique(new MatchAlias(alias)) def unapply(tp: MatchAlias): Option[Type] = Some(tp.alias) - } // ----- Annotated and Import types ----------------------------------------------- /** An annotated type tpe @ annot */ - abstract 
case class AnnotatedType(parent: Type, annot: Annotation) extends CachedProxyType, ValueType { + abstract case class AnnotatedType(parent: Type, annot: Annotation) extends CachedProxyType, ValueType: override def underlying(using Context): Type = parent @@ -5320,13 +4976,11 @@ object Types { private var isRefiningKnown = false private var isRefiningCache: Boolean = _ - def isRefining(using Context): Boolean = { - if (!isRefiningKnown) { + def isRefining(using Context): Boolean = + if (!isRefiningKnown) isRefiningCache = annot.symbol.derivesFrom(defn.RefiningAnnotationClass) isRefiningKnown = true - } isRefiningCache - } // equals comes from case class; no matching override is needed @@ -5342,7 +4996,6 @@ object Types { override def iso(that: Any, bs: BinderPairs): Boolean = that match case that: AnnotatedType => parent.equals(that.parent, bs) && (annot eql that.annot) case _ => false - } class CachedAnnotatedType(parent: Type, annot: Annotation) extends AnnotatedType(parent, annot) @@ -5356,49 +5009,43 @@ object Types { // Special type objects and classes ----------------------------------------------------- /** The type of an erased array */ - abstract case class JavaArrayType(elemType: Type) extends CachedGroundType with ValueType { + abstract case class JavaArrayType(elemType: Type) extends CachedGroundType with ValueType: def derivedJavaArrayType(elemtp: Type)(using Context): JavaArrayType = if (elemtp eq this.elemType) this else JavaArrayType(elemtp) override def computeHash(bs: Binders): Int = doHash(bs, elemType) override def hashIsStable: Boolean = elemType.hashIsStable - override def eql(that: Type): Boolean = that match { + override def eql(that: Type): Boolean = that match case that: JavaArrayType => elemType.eq(that.elemType) case _ => false - } - } final class CachedJavaArrayType(elemType: Type) extends JavaArrayType(elemType) - object JavaArrayType { + object JavaArrayType: def apply(elemType: Type)(using Context): JavaArrayType = unique(new 
CachedJavaArrayType(elemType)) - } /** The type of an import clause tree */ case class ImportType(expr: Tree) extends UncachedGroundType /** Sentinel for "missing type" */ - @sharable case object NoType extends CachedGroundType { + @sharable case object NoType extends CachedGroundType: override def computeHash(bs: Binders): Int = hashSeed - } /** Missing prefix */ - @sharable case object NoPrefix extends CachedGroundType { + @sharable case object NoPrefix extends CachedGroundType: override def computeHash(bs: Binders): Int = hashSeed - } /** A common superclass of `ErrorType` and `TryDynamicCallSite`. Instances of this * class are at the same time subtypes and supertypes of every other type. */ abstract class FlexType extends UncachedGroundType with ValueType - abstract class ErrorType extends FlexType { + abstract class ErrorType extends FlexType: /** An explanation of the cause of the failure */ def msg(using Context): Message /** An explanation of the cause of the failure as a string */ def explanation(using Context): String = msg.message - } object ErrorType: def apply(m: Message)(using Context): ErrorType = @@ -5413,15 +5060,14 @@ object Types { case Some(m) => m case None => em"error message from previous run no longer available" - object UnspecifiedErrorType extends ErrorType { + object UnspecifiedErrorType extends ErrorType: override def msg(using Context): Message = em"unspecified error" - } /* Type used to track Select nodes that could not resolve a member and their qualifier is a scala.Dynamic. 
*/ object TryDynamicCallType extends FlexType /** Wildcard type, possibly with bounds */ - abstract case class WildcardType(optBounds: Type) extends CachedGroundType with TermType { + abstract case class WildcardType(optBounds: Type) extends CachedGroundType with TermType: def effectiveBounds(using Context): TypeBounds = optBounds match case bounds: TypeBounds => bounds @@ -5435,22 +5081,19 @@ object Types { override def computeHash(bs: Binders): Int = doHash(bs, optBounds) override def hashIsStable: Boolean = optBounds.hashIsStable - override def eql(that: Type): Boolean = that match { + override def eql(that: Type): Boolean = that match case that: WildcardType => optBounds.eq(that.optBounds) case _ => false - } // equals comes from case class; no matching override is needed - override def iso(that: Any, bs: BinderPairs): Boolean = that match { + override def iso(that: Any, bs: BinderPairs): Boolean = that match case that: WildcardType => optBounds.equals(that.optBounds, bs) case _ => false - } - } final class CachedWildcardType(optBounds: Type) extends WildcardType(optBounds) - @sharable object WildcardType extends WildcardType(NoType) { + @sharable object WildcardType extends WildcardType(NoType): def apply(bounds: TypeBounds)(using Context): WildcardType = if bounds eq TypeBounds.empty then val result = ctx.base.emptyWildcardBounds @@ -5463,7 +5106,6 @@ object Types { /** A wildcard matching any type of the same kind as `tp`. */ def sameKindAs(tp: Type)(using Context): WildcardType = apply(TypeBounds.emptySameKindAs(tp)) - } /** An extractor for single abstract method types. * A type is a SAM type if it is a reference to a class or trait, which @@ -5476,14 +5118,13 @@ object Types { * The pattern `SAMType(sam)` matches a SAM type, where `sam` is the * type of the single abstract method. 
*/ - object SAMType { - def zeroParamClass(tp: Type)(using Context): Type = tp match { + object SAMType: + def zeroParamClass(tp: Type)(using Context): Type = tp match case tp: ClassInfo => - def zeroParams(tp: Type): Boolean = tp.stripPoly match { + def zeroParams(tp: Type): Boolean = tp.stripPoly match case mt: MethodType => mt.paramInfos.isEmpty && !mt.resultType.isInstanceOf[MethodType] case et: ExprType => true case _ => false - } // `ContextFunctionN` does not have constructors val ctor = tp.cls.primaryConstructor if (!ctor.exists || zeroParams(ctor.info)) tp @@ -5502,19 +5143,17 @@ object Types { zeroParamClass(tp.underlying) case _ => NoType - } - def isInstantiatable(tp: Type)(using Context): Boolean = zeroParamClass(tp) match { + def isInstantiatable(tp: Type)(using Context): Boolean = zeroParamClass(tp) match case cinfo: ClassInfo if !cinfo.cls.isOneOf(FinalOrSealed) => val selfType = cinfo.selfType.asSeenFrom(tp, cinfo.cls) tp <:< selfType case _ => false - } def unapply(tp: Type)(using Context): Option[MethodType] = - if (isInstantiatable(tp)) { + if (isInstantiatable(tp)) val absMems = tp.possibleSamMethods if (absMems.size == 1) - absMems.head.info match { + absMems.head.info match case mt: MethodType if !mt.isParamDependent && !defn.isContextFunctionType(mt.resultType) => val cls = tp.classSymbol @@ -5535,28 +5174,24 @@ object Types { // above example we get: // // (x: String): Int - val approxParams = new ApproximatingTypeMap { - def apply(tp: Type): Type = tp match { + val approxParams = new ApproximatingTypeMap: + def apply(tp: Type): Type = tp match case tp: TypeRef if tp.symbol.isAllOf(ClassTypeParam) && tp.symbol.owner == cls => - tp.info match { + tp.info match case info: AliasingBounds => mapOver(info.alias) case TypeBounds(lo, hi) => range(atVariance(-variance)(apply(lo)), apply(hi)) case _ => range(defn.NothingType, defn.AnyType) // should happen only in error cases - } case _ => mapOver(tp) - } - } val approx = if 
ctx.owner.isContainedIn(cls) then mt else approxParams(mt).asInstanceOf[MethodType] Some(approx) case _ => None - } else if (tp isRef defn.PartialFunctionClass) // To maintain compatibility with 2.x, we treat PartialFunction specially, // pretending it is a SAM type. In the future it would be better to merge @@ -5566,9 +5201,7 @@ object Types { // case clauses. absMems.find(_.symbol.name == nme.apply).map(_.info.asInstanceOf[MethodType]) else None - } else None - } // ----- TypeMaps -------------------------------------------------------------------- @@ -5582,13 +5215,12 @@ object Types { abstract class VariantTraversal: protected[dotc] var variance: Int = 1 - inline protected def atVariance[T](v: Int)(op: => T): T = { + inline protected def atVariance[T](v: Int)(op: => T): T = val saved = variance variance = v val res = op variance = saved res - } protected def stopAt: StopAt = StopAt.Static @@ -5710,12 +5342,12 @@ object Types { finally variance = saved /** Map this function over given type */ - def mapOver(tp: Type): Type = { + def mapOver(tp: Type): Type = record(s"TypeMap mapOver ${getClass}") record("TypeMap mapOver total") val ctx = this.mapCtx // optimization for performance given Context = ctx - tp match { + tp match case tp: NamedType => if stopBecauseStaticOrLocal(tp) then tp else @@ -5815,19 +5447,16 @@ object Types { case _ => tp - } - } private def treeTypeMap = new TreeTypeMap(typeMap = this) def mapOver(syms: List[Symbol]): List[Symbol] = mapSymbols(syms, treeTypeMap) - def mapOver(scope: Scope): Scope = { + def mapOver(scope: Scope): Scope = val elems = scope.toList val elems1 = mapOver(elems) if (elems1 eq elems) scope else newScopeWith(elems1: _*) - } def mapOver(tree: Tree): Tree = treeTypeMap(tree) @@ -5835,28 +5464,23 @@ object Types { protected def mapClassInfo(tp: ClassInfo): Type = derivedClassInfo(tp, this(tp.prefix)) - def andThen(f: Type => Type): TypeMap = new TypeMap { + def andThen(f: Type => Type): TypeMap = new TypeMap: override def 
stopAt = thisMap.stopAt def apply(tp: Type) = f(thisMap(tp)) - } } /** A type map that maps also parents and self type of a ClassInfo */ - abstract class DeepTypeMap(using Context) extends TypeMap { - override def mapClassInfo(tp: ClassInfo): ClassInfo = { + abstract class DeepTypeMap(using Context) extends TypeMap: + override def mapClassInfo(tp: ClassInfo): ClassInfo = val prefix1 = this(tp.prefix) val parents1 = tp.declaredParents mapConserve this - val selfInfo1: TypeOrSymbol = tp.selfInfo match { + val selfInfo1: TypeOrSymbol = tp.selfInfo match case selfInfo: Type => this(selfInfo) case selfInfo => selfInfo - } tp.derivedClassInfo(prefix1, parents1, tp.decls, selfInfo1) - } - } - @sharable object IdentityTypeMap extends TypeMap()(NoContext) { + @sharable object IdentityTypeMap extends TypeMap()(NoContext): def apply(tp: Type): Type = tp - } /** A type map that approximates TypeBounds types depending on * variance. @@ -5875,20 +5499,17 @@ object Types { protected def emptyRange = range(defn.NothingType, defn.AnyType) - protected def lower(tp: Type): Type = tp match { + protected def lower(tp: Type): Type = tp match case tp: Range => tp.lo case _ => tp - } - protected def upper(tp: Type): Type = tp match { + protected def upper(tp: Type): Type = tp match case tp: Range => tp.hi case _ => tp - } - protected def rangeToBounds(tp: Type): Type = tp match { + protected def rangeToBounds(tp: Type): Type = tp match case Range(lo, hi) => TypeBounds(lo, hi) case _ => tp - } private var expandingBounds: Boolean = false @@ -5915,10 +5536,10 @@ object Types { /** Try to widen a named type to its info relative to given prefix `pre`, where possible. * The possible cases are listed inline in the code. 
*/ - def tryWiden(tp: NamedType, pre: Type): Type = pre.member(tp.name) match { + def tryWiden(tp: NamedType, pre: Type): Type = pre.member(tp.name) match case d: SingleDenotation => val tp1 = d.info.dealiasKeepAnnots - tp1.stripAnnots match { + tp1.stripAnnots match case TypeAlias(alias) => // if H#T = U, then for any x in L..H, x.T =:= U, // hence we can replace with U under all variances @@ -5933,31 +5554,27 @@ object Types { useAlternate(info) case _ => NoType - } case _ => NoType - } /** Expand parameter reference corresponding to prefix `pre`; * If the expansion is a wildcard parameter reference, convert its * underlying bounds to a range, otherwise return the expansion. */ def expandParam(tp: NamedType, pre: Type): Type = - tp.argForParam(pre) match { + tp.argForParam(pre) match case arg @ TypeRef(pre, _) if pre.isArgPrefixOf(arg.symbol) => - arg.info match { + arg.info match case argInfo: TypeBounds => expandBounds(argInfo) case argInfo => useAlternate(arg) - } case arg: TypeBounds => expandBounds(arg) case arg => useAlternate(arg) - } /** Derived selection. * @pre the (upper bound of) prefix `pre` has a member named `tp.name`. 
*/ override protected def derivedSelect(tp: NamedType, pre: Type): Type = if (pre eq tp.prefix) tp - else pre match { + else pre match case Range(preLo, preHi) => val forwarded = if (tp.symbol.isAllOf(ClassTypeParam)) expandParam(tp, preHi) @@ -5965,22 +5582,20 @@ object Types { forwarded.orElse( range(super.derivedSelect(tp, preLo).loBound, super.derivedSelect(tp, preHi).hiBound)) case _ => - super.derivedSelect(tp, pre) match { + super.derivedSelect(tp, pre) match case TypeBounds(lo, hi) => range(lo, hi) case tp => tp - } - } override protected def derivedRefinedType(tp: RefinedType, parent: Type, info: Type): Type = if ((parent eq tp.parent) && (info eq tp.refinedInfo)) tp - else parent match { + else parent match case Range(parentLo, parentHi) => range(derivedRefinedType(tp, parentLo, info), derivedRefinedType(tp, parentHi, info)) case _ => def propagate(lo: Type, hi: Type) = range(derivedRefinedType(tp, parent, lo), derivedRefinedType(tp, parent, hi)) if (parent.isExactlyNothing) parent - else info match { + else info match case Range(infoLo: TypeBounds, infoHi: TypeBounds) => assert(variance == 0) if (!infoLo.isTypeAlias && !infoHi.isTypeAlias) propagate(infoLo, infoHi) @@ -5989,24 +5604,20 @@ object Types { propagate(infoLo, infoHi) case _ => tp.derivedRefinedType(parent, tp.refinedName, info) - } - } override protected def derivedRecType(tp: RecType, parent: Type): Type = if (parent eq tp.parent) tp - else parent match { + else parent match case Range(lo, hi) => range(tp.rebind(lo), tp.rebind(hi)) case _ => tp.rebind(parent) - } override protected def derivedAlias(tp: AliasingBounds, alias: Type): Type = if (alias eq tp.alias) tp - else alias match { + else alias match case Range(lo, hi) => if (variance > 0) TypeBounds(lo, hi) else range(tp.derivedAlias(lo), tp.derivedAlias(hi)) case _ => tp.derivedAlias(alias) - } override protected def derivedTypeBounds(tp: TypeBounds, lo: Type, hi: Type): Type = if ((lo eq tp.lo) && (hi eq tp.hi)) tp @@ -6020,7 +5631,7 
@@ object Types { else tp.derivedSuperType(thistp, supertp) override protected def derivedAppliedType(tp: AppliedType, tycon: Type, args: List[Type]): Type = - tycon match { + tycon match case Range(tyconLo, tyconHi) => range(derivedAppliedType(tp, tyconLo, args), derivedAppliedType(tp, tyconHi, args)) case _ => @@ -6041,21 +5652,19 @@ object Types { // Fail for non-variant argument ranges (see use-site else branch below). // If successful, the L-arguments are in loBut, the H-arguments in hiBuf. // @return operation succeeded for all arguments. - def distributeArgs(args: List[Type], tparams: List[ParamInfo]): Boolean = args match { + def distributeArgs(args: List[Type], tparams: List[ParamInfo]): Boolean = args match case Range(lo, hi) :: args1 => val v = tparams.head.paramVarianceSign if (v == 0) false - else { + else if (v > 0) { loBuf += lo; hiBuf += hi } else { loBuf += hi; hiBuf += lo } distributeArgs(args1, tparams.tail) - } case arg :: args1 => loBuf += arg; hiBuf += arg distributeArgs(args1, tparams.tail) case nil => true - } if (distributeArgs(args, tyconTypeParams(tp))) range(tp.derivedAppliedType(tycon, loBuf.toList), tp.derivedAppliedType(tycon, hiBuf.toList)) @@ -6065,7 +5674,6 @@ object Types { // See lampepfl/dotty#14152 range(defn.NothingType, tp.derivedAppliedType(tycon, args.map(rangeToBounds))) else tp.derivedAppliedType(tycon, args) - } private def isRangeOfNonTermTypes(tp: Type): Boolean = tp match case Range(lo, hi) => !lo.isInstanceOf[TermType] || !hi.isInstanceOf[TermType] @@ -6080,13 +5688,12 @@ object Types { else tp.derivedOrType(tp1, tp2) override protected def derivedAnnotatedType(tp: AnnotatedType, underlying: Type, annot: Annotation): Type = - underlying match { + underlying match case Range(lo, hi) => range(tp.derivedAnnotatedType(lo, annot), tp.derivedAnnotatedType(hi, annot)) case _ => if (underlying.isExactlyNothing) underlying else tp.derivedAnnotatedType(underlying, annot) - } override protected def derivedCapturingType(tp: 
Type, parent: Type, refs: CaptureSet): Type = parent match // TODO ^^^ handle ranges in capture sets as well case Range(lo, hi) => @@ -6120,15 +5727,14 @@ object Types { // a range. else range(defn.NothingType, info) - override protected def derivedClassInfo(tp: ClassInfo, pre: Type): Type = { + override protected def derivedClassInfo(tp: ClassInfo, pre: Type): Type = assert(!isRange(pre)) // we don't know what to do here; this case has to be handled in subclasses // (typically by handling ClassInfo's specially, in case they can be encountered). tp.derivedClassInfo(pre) - } override protected def derivedLambdaType(tp: LambdaType)(formals: List[tp.PInfo], restpe: Type): Type = - restpe match { + restpe match case Range(lo, hi) => range(derivedLambdaType(tp)(formals, lo), derivedLambdaType(tp)(formals, hi)) case _ => @@ -6138,7 +5744,6 @@ object Types { derivedLambdaType(tp)(formals.map(lower(_).asInstanceOf[tp.PInfo]), restpe)) else tp.derivedLambdaType(tp.paramNames, formals, restpe) - } /** Overridden in TypeOps.avoid */ protected def needsRangeIfInvariant(refs: CaptureSet): Boolean = true @@ -6171,7 +5776,7 @@ object Types { // ----- TypeAccumulators ---------------------------------------------------- abstract class TypeAccumulator[T](implicit protected val accCtx: Context) - extends VariantTraversal with ((T, Type) => T) { + extends VariantTraversal with ((T, Type) => T): def apply(x: T, tp: Type): T @@ -6202,14 +5807,12 @@ object Types { case tp @ AppliedType(tycon, args) => @tailrec def foldArgs(x: T, tparams: List[ParamInfo], args: List[Type]): T = if (args.isEmpty || tparams.isEmpty) x - else { + else val tparam = tparams.head - val acc = args.head match { + val acc = args.head match case arg: TypeBounds => this(x, arg) case arg => atVariance(variance * tparam.paramVarianceSign)(this(x, arg)) - } foldArgs(acc, tparams.tail, args.tail) - } foldArgs(this(x, tycon), tyconTypeParams(tp), args) case _: BoundType | _: ThisType => x @@ -6233,12 +5836,11 @@ object 
Types { case bounds @ TypeBounds(lo, hi) => if (lo eq hi) atVariance(0)(this(x, lo)) - else { + else variance = -variance val y = this(x, lo) variance = -variance this(y, hi) - } case tp: AndType => this(this(x, tp.tp1), tp.tp2) @@ -6285,19 +5887,16 @@ object Types { this(x, tp.parent) case _ => x - }} + }} - @tailrec final def foldOver(x: T, ts: List[Type]): T = ts match { + @tailrec final def foldOver(x: T, ts: List[Type]): T = ts match case t :: ts1 => foldOver(apply(x, t), ts1) case nil => x - } - } - abstract class TypeTraverser(using Context) extends TypeAccumulator[Unit] { + abstract class TypeTraverser(using Context) extends TypeAccumulator[Unit]: def traverse(tp: Type): Unit def apply(x: Unit, tp: Type): Unit = traverse(tp) protected def traverseChildren(tp: Type): Unit = foldOver((), tp) - } class ExistsAccumulator( p: Type => Boolean, @@ -6306,9 +5905,8 @@ object Types { def apply(x: Boolean, tp: Type): Boolean = x || p(tp) || (forceLazy || !tp.isInstanceOf[LazyRef]) && foldOver(x, tp) - class ForeachAccumulator(p: Type => Unit, override val stopAt: StopAt)(using Context) extends TypeAccumulator[Unit] { + class ForeachAccumulator(p: Type => Unit, override val stopAt: StopAt)(using Context) extends TypeAccumulator[Unit]: def apply(x: Unit, tp: Type): Unit = foldOver(p(tp), tp) - } class NamedPartsAccumulator(p: NamedType => Boolean)(using Context) extends TypeAccumulator[List[NamedType]]: @@ -6339,24 +5937,21 @@ object Types { foldOver(xs, tp) end NamedPartsAccumulator - class isGroundAccumulator(using Context) extends TypeAccumulator[Boolean] { - def apply(x: Boolean, tp: Type): Boolean = x && { - tp match { + class isGroundAccumulator(using Context) extends TypeAccumulator[Boolean]: + def apply(x: Boolean, tp: Type): Boolean = x `&&`: + tp match case _: TypeParamRef => false case tp: TypeVar => apply(x, tp.underlying) case tp: AppliedType => tp.isGround(this) case _ => foldOver(x, tp) - } - } - } - class TypeSizeAccumulator(using Context) extends 
TypeAccumulator[Int] { + class TypeSizeAccumulator(using Context) extends TypeAccumulator[Int]: var seen = util.HashSet[Type](initialCapacity = 8) def apply(n: Int, tp: Type): Int = if seen.contains(tp) then n - else { + else seen += tp - tp match { + tp match case tp: AppliedType => foldOver(n + 1, tp) case tp: RefinedType => @@ -6367,17 +5962,14 @@ object Types { apply(n, TypeComparer.bounds(tp)) case _ => foldOver(n, tp) - } - } - } - class CoveringSetAccumulator(using Context) extends TypeAccumulator[Set[Symbol]] { + class CoveringSetAccumulator(using Context) extends TypeAccumulator[Set[Symbol]]: var seen = util.HashSet[Type](initialCapacity = 8) def apply(cs: Set[Symbol], tp: Type): Set[Symbol] = if seen.contains(tp) then cs - else { + else seen += tp - tp match { + tp match case tp if tp.isExactlyAny || tp.isExactlyNothing => cs case tp: AppliedType => @@ -6395,9 +5987,6 @@ object Types { apply(cs, TypeComparer.bounds(tp)) case other => foldOver(cs, tp) - } - } - } // ----- Name Filters -------------------------------------------------- @@ -6408,76 +5997,64 @@ object Types { * * keep(pre, name) implies keep(C.this, name) */ - abstract class NameFilter { + abstract class NameFilter: def apply(pre: Type, name: Name)(using Context): Boolean /** Filter does not need to be rechecked with full prefix, if it * has been already checked for the class denotation of the prefix */ def isStable: Boolean - } /** A filter for names of abstract types of a given type */ - object abstractTypeNameFilter extends NameFilter { + object abstractTypeNameFilter extends NameFilter: def apply(pre: Type, name: Name)(using Context): Boolean = - name.isTypeName && { + name.isTypeName `&&`: val mbr = pre.nonPrivateMember(name) mbr.symbol.is(Deferred) && mbr.info.isInstanceOf[RealTypeBounds] - } def isStable = false - } /** A filter for names of abstract types of a given type */ - object nonClassTypeNameFilter extends NameFilter { + object nonClassTypeNameFilter extends NameFilter: def 
apply(pre: Type, name: Name)(using Context): Boolean = - name.isTypeName && { + name.isTypeName `&&`: val mbr = pre.member(name) mbr.symbol.isType && !mbr.symbol.isClass - } def isStable = false - } /** A filter for names of deferred term definitions of a given type */ - object abstractTermNameFilter extends NameFilter { + object abstractTermNameFilter extends NameFilter: def apply(pre: Type, name: Name)(using Context): Boolean = name.isTermName && pre.nonPrivateMember(name).hasAltWith(_.symbol.is(Deferred)) def isStable = false - } /** A filter for names of type aliases of a given type */ - object typeAliasNameFilter extends NameFilter { + object typeAliasNameFilter extends NameFilter: def apply(pre: Type, name: Name)(using Context): Boolean = - name.isTypeName && { + name.isTypeName `&&`: val mbr = pre.nonPrivateMember(name) mbr.symbol.isAliasType - } def isStable = false - } - object typeNameFilter extends NameFilter { + object typeNameFilter extends NameFilter: def apply(pre: Type, name: Name)(using Context): Boolean = name.isTypeName def isStable = true - } - object fieldFilter extends NameFilter { + object fieldFilter extends NameFilter: def apply(pre: Type, name: Name)(using Context): Boolean = name.isTermName && (pre member name).hasAltWith(!_.symbol.is(Method)) def isStable = true - } - object takeAllFilter extends NameFilter { + object takeAllFilter extends NameFilter: def apply(pre: Type, name: Name)(using Context): Boolean = name != nme.CONSTRUCTOR def isStable = true - } - object implicitFilter extends NameFilter { + object implicitFilter extends NameFilter: /** A dummy filter method. * Implicit filtering is handled specially in computeMemberNames, so * no post-filtering is needed. 
*/ def apply(pre: Type, name: Name)(using Context): Boolean = true def isStable = true - } // ----- Debug --------------------------------------------------------- @@ -6486,28 +6063,24 @@ object Types { val watchList: List[TypeName] = List[String]( ) map (_.toTypeName) - def isWatched(tp: Type)(using Context): Boolean = tp match { + def isWatched(tp: Type)(using Context): Boolean = tp match case ref: TypeRef => watchList contains ref.name case _ => false - } // ----- Helpers and Decorator implicits -------------------------------------- implicit def decorateTypeApplications(tpe: Type): TypeApplications = new TypeApplications(tpe) - extension (tps1: List[Type]) { + extension (tps1: List[Type]) @tailrec def hashIsStable: Boolean = tps1.isEmpty || tps1.head.hashIsStable && tps1.tail.hashIsStable @tailrec def equalElements(tps2: List[Type], bs: BinderPairs): Boolean = - (tps1 `eq` tps2) || { + (tps1 `eq` tps2) `||`: if (tps1.isEmpty) tps2.isEmpty else tps2.nonEmpty && tps1.head.equals(tps2.head, bs) && tps1.tail.equalElements(tps2.tail, bs) - } - } private val keepAlways: AnnotatedType => Context ?=> Boolean = _ => true private val keepNever: AnnotatedType => Context ?=> Boolean = _ => false private val keepIfRefining: AnnotatedType => Context ?=> Boolean = _.isRefining val isBounds: Type => Boolean = _.isInstanceOf[TypeBounds] -} diff --git a/compiler/src/dotty/tools/dotc/core/Variances.scala b/compiler/src/dotty/tools/dotc/core/Variances.scala index 2401b43c8e17..ddad0971eb98 100644 --- a/compiler/src/dotty/tools/dotc/core/Variances.scala +++ b/compiler/src/dotty/tools/dotc/core/Variances.scala @@ -5,7 +5,7 @@ import Types._, Contexts._, Flags._, Symbols._, Annotations._ import TypeApplications.TypeParamInfo import Decorators._ -object Variances { +object Variances: type Variance = FlagSet val Bivariant: Variance = VarianceFlags @@ -30,13 +30,12 @@ object Variances { def setStructuralVariances(lam: HKTypeLambda)(using Context): Unit = 
assert(!lam.isDeclaredVarianceLambda) for param <- lam.typeParams do param.storedVariance = Bivariant - object narrowVariances extends TypeTraverser { + object narrowVariances extends TypeTraverser: def traverse(t: Type): Unit = t match case t: TypeParamRef if t.binder eq lam => lam.typeParams(t.paramNum).storedVariance &= varianceFromInt(variance) case _ => traverseChildren(t) - } // Note: Normally, we'd need to repeat `traverse` until a fixpoint is reached. // But since recursive lambdas can only appear in bounds, and bounds never have // structural variances, a single traversal is enough. @@ -79,4 +78,3 @@ object Variances { else "invariant" val alwaysInvariant: Any => Invariant.type = Function.const(Invariant) -} diff --git a/compiler/src/dotty/tools/dotc/core/classfile/AbstractFileReader.scala b/compiler/src/dotty/tools/dotc/core/classfile/AbstractFileReader.scala index 46581d00714e..b4c373353250 100644 --- a/compiler/src/dotty/tools/dotc/core/classfile/AbstractFileReader.scala +++ b/compiler/src/dotty/tools/dotc/core/classfile/AbstractFileReader.scala @@ -17,7 +17,7 @@ import io.AbstractFile * @author Philippe Altherr * @version 1.0, 23/03/2004 */ -final class AbstractFileReader(val buf: Array[Byte]) extends DataReader { +final class AbstractFileReader(val buf: Array[Byte]) extends DataReader: def this(file: AbstractFile) = this(file.toByteArray) /** the current input pointer @@ -29,9 +29,8 @@ final class AbstractFileReader(val buf: Array[Byte]) extends DataReader { def getByte(mybp: Int): Byte = buf(mybp) - def getBytes(mybp: Int, bytes: Array[Byte]): Unit = { + def getBytes(mybp: Int, bytes: Array[Byte]): Unit = System.arraycopy(buf, mybp, bytes, 0, bytes.length) - } /** return byte at offset 'pos' */ @@ -41,18 +40,16 @@ final class AbstractFileReader(val buf: Array[Byte]) extends DataReader { /** read a byte */ @throws(classOf[IndexOutOfBoundsException]) - def nextByte: Byte = { + def nextByte: Byte = val b = buf(bp) bp += 1 b - } /** read some bytes */ - 
def nextBytes(len: Int): Array[Byte] = { + def nextBytes(len: Int): Array[Byte] = bp += len buf.slice(bp - len, bp) - } /** read a character */ @@ -96,5 +93,4 @@ final class AbstractFileReader(val buf: Array[Byte]) extends DataReader { /** skip next 'n' bytes */ def skip(n: Int): Unit = { bp += n } -} diff --git a/compiler/src/dotty/tools/dotc/core/classfile/ByteCodecs.scala b/compiler/src/dotty/tools/dotc/core/classfile/ByteCodecs.scala index acd7982f4fd3..3186b08a6554 100644 --- a/compiler/src/dotty/tools/dotc/core/classfile/ByteCodecs.scala +++ b/compiler/src/dotty/tools/dotc/core/classfile/ByteCodecs.scala @@ -7,65 +7,55 @@ \* */ package dotty.tools.dotc.core.classfile -object ByteCodecs { +object ByteCodecs: - def avoidZero(src: Array[Byte]): Array[Byte] = { + def avoidZero(src: Array[Byte]): Array[Byte] = var i = 0 val srclen = src.length var count = 0 - while (i < srclen) { + while (i < srclen) if (src(i) == 0x7f) count += 1 i += 1 - } val dst = new Array[Byte](srclen + count) i = 0 var j = 0 - while (i < srclen) { + while (i < srclen) val in = src(i) - if (in == 0x7f) { + if (in == 0x7f) dst(j) = (0xc0).toByte dst(j + 1) = (0x80).toByte j += 2 - } - else { + else dst(j) = (in + 1).toByte j += 1 - } i += 1 - } dst - } - def regenerateZero(src: Array[Byte]): Int = { + def regenerateZero(src: Array[Byte]): Int = var i = 0 val srclen = src.length var j = 0 - while (i < srclen) { + while (i < srclen) val in: Int = src(i) & 0xff - if (in == 0xc0 && (src(i + 1) & 0xff) == 0x80) { + if (in == 0xc0 && (src(i + 1) & 0xff) == 0x80) src(j) = 0x7f i += 2 - } - else if (in == 0) { + else if (in == 0) src(j) = 0x7f i += 1 - } - else { + else src(j) = (in - 1).toByte i += 1 - } j += 1 - } j - } - def encode8to7(src: Array[Byte]): Array[Byte] = { + def encode8to7(src: Array[Byte]): Array[Byte] = val srclen = src.length val dstlen = (srclen * 8 + 6) / 7 val dst = new Array[Byte](dstlen) var i = 0 var j = 0 - while (i + 6 < srclen) { + while (i + 6 < srclen) var in: Int = 
src(i) & 0xff dst(j) = (in & 0x7f).toByte var out: Int = in >>> 7 @@ -90,46 +80,38 @@ object ByteCodecs { dst(j + 7) = out.toByte i += 7 j += 8 - } - if (i < srclen) { + if (i < srclen) var in: Int = src(i) & 0xff dst(j) = (in & 0x7f).toByte; j += 1 var out: Int = in >>> 7 - if (i + 1 < srclen) { + if (i + 1 < srclen) in = src(i + 1) & 0xff dst(j) = (out | (in << 1) & 0x7f).toByte; j += 1 out = in >>> 6 - if (i + 2 < srclen) { + if (i + 2 < srclen) in = src(i + 2) & 0xff dst(j) = (out | (in << 2) & 0x7f).toByte; j += 1 out = in >>> 5 - if (i + 3 < srclen) { + if (i + 3 < srclen) in = src(i + 3) & 0xff dst(j) = (out | (in << 3) & 0x7f).toByte; j += 1 out = in >>> 4 - if (i + 4 < srclen) { + if (i + 4 < srclen) in = src(i + 4) & 0xff dst(j) = (out | (in << 4) & 0x7f).toByte; j += 1 out = in >>> 3 - if (i + 5 < srclen) { + if (i + 5 < srclen) in = src(i + 5) & 0xff dst(j) = (out | (in << 5) & 0x7f).toByte; j += 1 out = in >>> 2 - } - } - } - } - } if (j < dstlen) dst(j) = out.toByte - } dst - } - def decode7to8(src: Array[Byte], srclen: Int): Int = { + def decode7to8(src: Array[Byte], srclen: Int): Int = var i = 0 var j = 0 val dstlen = (srclen * 7 + 7) / 8 - while (i + 7 < srclen) { + while (i + 7 < srclen) var out: Int = src(i) var in: Byte = src(i + 1) src(j) = (out | (in & 0x01) << 7).toByte @@ -153,43 +135,34 @@ object ByteCodecs { src(j + 6) = (out | in << 1).toByte i += 8 j += 7 - } - if (i < srclen) { + if (i < srclen) var out: Int = src(i) - if (i + 1 < srclen) { + if (i + 1 < srclen) var in: Byte = src(i + 1) src(j) = (out | (in & 0x01) << 7).toByte; j += 1 out = in >>> 1 - if (i + 2 < srclen) { + if (i + 2 < srclen) in = src(i + 2) src(j) = (out | (in & 0x03) << 6).toByte; j += 1 out = in >>> 2 - if (i + 3 < srclen) { + if (i + 3 < srclen) in = src(i + 3) src(j) = (out | (in & 0x07) << 5).toByte; j += 1 out = in >>> 3 - if (i + 4 < srclen) { + if (i + 4 < srclen) in = src(i + 4) src(j) = (out | (in & 0x0f) << 4).toByte; j += 1 out = in >>> 4 - if (i + 5 < 
srclen) { + if (i + 5 < srclen) in = src(i + 5) src(j) = (out | (in & 0x1f) << 3).toByte; j += 1 out = in >>> 5 - if (i + 6 < srclen) { + if (i + 6 < srclen) in = src(i + 6) src(j) = (out | (in & 0x3f) << 2).toByte; j += 1 out = in >>> 6 - } - } - } - } - } - } if (j < dstlen) src(j) = out.toByte - } dstlen - } def encode(xs: Array[Byte]): Array[Byte] = avoidZero(encode8to7(xs)) @@ -209,11 +182,9 @@ object ByteCodecs { * * However, this does not always happen. */ - def decode(xs: Array[Byte]): Int = { + def decode(xs: Array[Byte]): Int = val len = regenerateZero(xs) decode7to8(xs, len) - } -} diff --git a/compiler/src/dotty/tools/dotc/core/classfile/ClassfileConstants.scala b/compiler/src/dotty/tools/dotc/core/classfile/ClassfileConstants.scala index 4aa60d973264..72a6f76e7a31 100644 --- a/compiler/src/dotty/tools/dotc/core/classfile/ClassfileConstants.scala +++ b/compiler/src/dotty/tools/dotc/core/classfile/ClassfileConstants.scala @@ -4,7 +4,7 @@ package classfile import scala.annotation.switch -object ClassfileConstants { +object ClassfileConstants: inline val JAVA_MAGIC = 0xCAFEBABE inline val JAVA_MAJOR_VERSION = 45 @@ -332,12 +332,12 @@ object ClassfileConstants { inline val impdep2 = 0xff import Flags._ - abstract class FlagTranslation { + abstract class FlagTranslation: protected def baseFlags(jflags: Int): FlagSet = EmptyFlags protected def isClass: Boolean = false - private def translateFlag(jflag: Int): FlagSet = (jflag: @switch) match { + private def translateFlag(jflag: Int): FlagSet = (jflag: @switch) match case JAVA_ACC_PRIVATE => Private case JAVA_ACC_PROTECTED => Protected case JAVA_ACC_FINAL => Final @@ -348,12 +348,11 @@ object ClassfileConstants { case JAVA_ACC_INTERFACE => PureInterfaceCreationFlags | JavaDefined case JAVA_ACC_ANNOTATION => JavaAnnotation case _ => EmptyFlags - } private def addFlag(base: FlagSet, jflag: Int): FlagSet = if (jflag == 0) base else base | translateFlag(jflag) - private def translateFlags(jflags: Int, baseFlags: 
FlagSet): FlagSet = { + private def translateFlags(jflags: Int, baseFlags: FlagSet): FlagSet = var res: FlagSet = baseFlags | JavaDefined res = addFlag(res, jflags & JAVA_ACC_PRIVATE) res = addFlag(res, jflags & JAVA_ACC_PROTECTED) @@ -365,17 +364,11 @@ object ClassfileConstants { res = addFlag(res, jflags & JAVA_ACC_INTERFACE) res = addFlag(res, jflags & JAVA_ACC_ANNOTATION) res - } def flags(jflags: Int): FlagSet = translateFlags(jflags, baseFlags(jflags)) - } - val classTranslation: FlagTranslation = new FlagTranslation { + val classTranslation: FlagTranslation = new FlagTranslation: override def isClass = true - } - val fieldTranslation: FlagTranslation = new FlagTranslation { + val fieldTranslation: FlagTranslation = new FlagTranslation: override def baseFlags(jflags: Int) = if ((jflags & JAVA_ACC_FINAL) == 0) Mutable else EmptyFlags - } - val methodTranslation: FlagTranslation = new FlagTranslation { + val methodTranslation: FlagTranslation = new FlagTranslation: override def baseFlags(jflags: Int) = if ((jflags & JAVA_ACC_BRIDGE) != 0) Bridge else EmptyFlags - } -} diff --git a/compiler/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala b/compiler/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala index 71e00f985584..93552b8fc0bf 100644 --- a/compiler/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala +++ b/compiler/src/dotty/tools/dotc/core/classfile/ClassfileParser.scala @@ -24,7 +24,7 @@ import typer.Checking.checkNonCyclic import io.{AbstractFile, ZipArchive} import scala.util.control.NonFatal -object ClassfileParser { +object ClassfileParser: /** Marker trait for unpicklers that can be embedded in classfiles. 
*/ trait Embedded @@ -33,8 +33,8 @@ object ClassfileParser { object NoEmbedded extends Embedded /** Replace raw types with wildcard applications */ - def cook(using Context): TypeMap = new TypeMap { - def apply(tp: Type): Type = tp match { + def cook(using Context): TypeMap = new TypeMap: + def apply(tp: Type): Type = tp match case tp: TypeRef if tp.symbol.typeParams.nonEmpty => AppliedType(tp, tp.symbol.typeParams.map(Function.const(TypeBounds.empty))) case tp @ AppliedType(tycon, args) => @@ -48,14 +48,11 @@ object ClassfileParser { if (parents eq parents1) tp else tp.copy(parentTypes = parents1) case _ => mapOver(tp) - } - } -} class ClassfileParser( classfile: AbstractFile, classRoot: ClassDenotation, - moduleRoot: ClassDenotation)(ictx: Context) { + moduleRoot: ClassDenotation)(ictx: Context): import ClassfileConstants._ import ClassfileParser._ @@ -88,15 +85,14 @@ class ClassfileParser( this.pool = null res } - catch { + catch case e: RuntimeException => if (ctx.debug) e.printStackTrace() throw new IOException( i"""class file ${classfile.canonicalPath} is broken, reading aborted with ${e.getClass} |${Option(e.getMessage).getOrElse("")}""") - } - private def parseHeader()(using in: DataReader): Unit = { + private def parseHeader()(using in: DataReader): Unit = val magic = in.nextInt if (magic != JAVA_MAGIC) throw new IOException(s"class file '${classfile}' has wrong magic number 0x${toHexString(magic)}, should be 0x${toHexString(JAVA_MAGIC)}") @@ -107,7 +103,6 @@ class ClassfileParser( (minorVersion < JAVA_MINOR_VERSION))) throw new IOException( s"class file '${classfile}' has unknown version $majorVersion.$minorVersion, should be at least $JAVA_MAJOR_VERSION.$JAVA_MINOR_VERSION") - } /** Return the class symbol of the given name. 
*/ def classNameToSymbol(name: Name)(using Context): Symbol = @@ -146,7 +141,7 @@ class ClassfileParser( var sawPrivateConstructor: Boolean = false - def parseClass()(using ctx: Context, in: DataReader): Option[Embedded] = { + def parseClass()(using ctx: Context, in: DataReader): Option[Embedded] = val jflags = in.nextChar val isAnnotation = hasAnnotation(jflags) val sflags = classTranslation.flags(jflags) @@ -163,15 +158,14 @@ class ClassfileParser( /** Parse parents for Java classes. For Scala, return AnyRef, since the real type will be unpickled. * Updates the read pointer of 'in'. */ - def parseParents: List[Type] = { + def parseParents: List[Type] = val superType = if (classRoot.symbol == defn.ComparableClass || classRoot.symbol == defn.JavaCloneableClass || - classRoot.symbol == defn.JavaSerializableClass) { + classRoot.symbol == defn.JavaSerializableClass) // Treat these interfaces as universal traits in.nextChar defn.AnyType - } else pool.getSuperClass(in.nextChar).typeRef val ifaceCount = in.nextChar @@ -183,11 +177,10 @@ class ClassfileParser( // is found. Previously, this worked because of weak conformance, which has been dropped. 
superType :: ifaces - } val result = unpickleOrParseInnerClasses() - if (!result.isDefined) { + if (!result.isDefined) var classInfo: Type = TempClassInfoType(parseParents, instanceScope, classRoot.symbol) // might be reassigned by later parseAttributes val staticInfo = TempClassInfoType(List(), staticScope, moduleRoot.symbol) @@ -219,34 +212,29 @@ class ClassfileParser( setClassInfo(classRoot, classInfo, fromScala2 = false) NamerOps.addConstructorProxies(moduleRoot.classSymbol) - } else if (result == Some(NoEmbedded)) - for (sym <- List(moduleRoot.sourceModule, moduleRoot.symbol, classRoot.symbol)) { + for (sym <- List(moduleRoot.sourceModule, moduleRoot.symbol, classRoot.symbol)) classRoot.owner.asClass.delete(sym) sym.markAbsent() - } result - } /** Add type parameters of enclosing classes */ - def addEnclosingTParams()(using Context): Unit = { + def addEnclosingTParams()(using Context): Unit = var sym = classRoot.owner - while (sym.isClass && !sym.is(Flags.ModuleClass)) { + while (sym.isClass && !sym.is(Flags.ModuleClass)) for (tparam <- sym.typeParams) classTParams = classTParams.updated(tparam.name, tparam) sym = sym.owner - } - } - def parseMember(method: Boolean)(using ctx: Context, in: DataReader): Unit = { + def parseMember(method: Boolean)(using ctx: Context, in: DataReader): Unit = val start = indexCoord(in.bp) val jflags = in.nextChar val sflags = if (method) Flags.Method | methodTranslation.flags(jflags) else fieldTranslation.flags(jflags) val preName = pool.getName(in.nextChar) - if (!sflags.isOneOf(Flags.PrivateOrArtifact) || preName.name == nme.CONSTRUCTOR) { + if (!sflags.isOneOf(Flags.PrivateOrArtifact) || preName.name == nme.CONSTRUCTOR) val sig = pool.getExternalName(in.nextChar).value val completer = MemberCompleter(preName.name, jflags, sig) val member = newSymbol( @@ -257,17 +245,14 @@ class ClassfileParser( getScope(jflags).enter(member) - } - else { + else in.nextChar // info skipAttributes() - } - } - class MemberCompleter(name: 
SimpleName, jflags: Int, sig: String) extends LazyType { + class MemberCompleter(name: SimpleName, jflags: Int, sig: String) extends LazyType: var attrCompleter: AttributeCompleter = null - def complete(denot: SymDenotation)(using Context): Unit = { + def complete(denot: SymDenotation)(using Context): Unit = val sym = denot.symbol val isEnum = (jflags & JAVA_ACC_ENUM) != 0 val isNative = (jflags & JAVA_ACC_NATIVE) != 0 @@ -278,35 +263,31 @@ class ClassfileParser( /** Strip leading outer param from constructor and trailing access tag for * private inner constructors. */ - def normalizeConstructorParams() = innerClasses.get(currentClassName.toString) match { + def normalizeConstructorParams() = innerClasses.get(currentClassName.toString) match case Some(entry) if !isStatic(entry.jflags) => val mt @ MethodTpe(paramNames, paramTypes, resultType) = denot.info: @unchecked var normalizedParamNames = paramNames.tail var normalizedParamTypes = paramTypes.tail - if ((jflags & JAVA_ACC_SYNTHETIC) != 0) { + if ((jflags & JAVA_ACC_SYNTHETIC) != 0) // SI-7455 strip trailing dummy argument ("access constructor tag") from synthetic constructors which // are added when an inner class needs to access a private constructor. 
normalizedParamNames = paramNames.dropRight(1) normalizedParamTypes = paramTypes.dropRight(1) - } denot.info = mt.derivedLambdaType(normalizedParamNames, normalizedParamTypes, resultType) case _ => - } /** Make return type of constructor be the enclosing class type, * and make constructor type polymorphic in the type parameters of the class */ - def normalizeConstructorInfo() = { + def normalizeConstructorInfo() = val rt = classRoot.typeRef appliedTo (classRoot.typeParams map (_.typeRef)) - def resultType(tpe: Type): Type = tpe match { + def resultType(tpe: Type): Type = tpe match case mt @ MethodType(paramNames) => mt.derivedLambdaType(paramNames, mt.paramInfos, rt) case pt : PolyType => pt.derivedLambdaType(pt.paramNames, pt.paramInfos, resultType(pt.resType)) - } denot.info = resultType(denot.info) addConstructorTypeParams(denot) - } val isVarargs = denot.is(Flags.Method) && (jflags & JAVA_ACC_VARARGS) != 0 denot.info = sigToType(sig, isVarargs = isVarargs) @@ -323,21 +304,17 @@ class ClassfileParser( if (ctx.explicitNulls) denot.info = JavaNullInterop.nullifyMember(denot.symbol, denot.info, isEnum) // seal java enums - if (isEnum) { + if (isEnum) val enumClass = sym.owner.linkedClass if (!enumClass.exists) report.warning(em"no linked class for java enum $sym in ${sym.owner}. 
A referencing class file might be missing an InnerClasses entry.") - else { + else if (!enumClass.is(Flags.Sealed)) enumClass.setFlag(Flags.AbstractSealed) enumClass.addAnnotation(Annotation.Child(sym, NoSpan)) - } - } - } - } def constantTagToType(tag: Int)(using Context): Type = - (tag: @switch) match { + (tag: @switch) match case BYTE_TAG => defn.ByteType case CHAR_TAG => defn.CharType case DOUBLE_TAG => defn.DoubleType @@ -347,7 +324,6 @@ class ClassfileParser( case SHORT_TAG => defn.ShortType case VOID_TAG => defn.UnitType case BOOL_TAG => defn.BooleanType - } /** As specified in https://docs.oracle.com/javase/specs/jvms/se8/html/jvms-4.html#jvms-4.7.16.1, * an annotation argument of type boolean, byte, char or short will @@ -357,30 +333,27 @@ class ClassfileParser( * conversion in the `values` cache, because the same constant might * be used for annotation arguments of different type. */ - def convertTo(ct: Constant, pt: Type)(using Context): Constant = { + def convertTo(ct: Constant, pt: Type)(using Context): Constant = if (pt eq defn.BooleanType) && ct.tag == IntTag then Constant(ct.value != 0) else ct.convertTo(pt) - } - private def sigToType(sig: String, owner: Symbol = null, isVarargs: Boolean = false)(using Context): Type = { + private def sigToType(sig: String, owner: Symbol = null, isVarargs: Boolean = false)(using Context): Type = var index = 0 val end = sig.length - def accept(ch: Char): Unit = { + def accept(ch: Char): Unit = assert(sig(index) == ch, (sig(index), ch)) index += 1 - } - def subName(isDelimiter: Char => Boolean): SimpleName = { + def subName(isDelimiter: Char => Boolean): SimpleName = val start = index while (!isDelimiter(sig(index))) { index += 1 } termName(sig.slice(start, index)) - } // Warning: sigToType contains nested completers which might be forced in a later run! // So local methods need their own ctx parameters. 
- def sig2type(tparams: immutable.Map[Name, Symbol], skiptvs: Boolean)(using Context): Type = { + def sig2type(tparams: immutable.Map[Name, Symbol], skiptvs: Boolean)(using Context): Type = val tag = sig(index); index += 1 - (tag: @switch) match { + (tag: @switch) match case 'L' => /** A type representation where inner classes become `A#B` instead of `A.this.B` (like with `typeRef`) * @@ -394,16 +367,16 @@ class ClassfileParser( else throw new RuntimeException("unexpected term symbol " + symbol) - def processTypeArgs(tp: Type): Type = tp match { + def processTypeArgs(tp: Type): Type = tp match case tp: TypeRef => - if (sig(index) == '<') { + if (sig(index) == '<') accept('<') val argsBuf = if (skiptvs) null else new ListBuffer[Type] - while (sig(index) != '>') { - val arg = sig(index) match { + while (sig(index) != '>') + val arg = sig(index) match case variance @ ('+' | '-' | '*') => index += 1 - variance match { + variance match case '+' => TypeBounds.upper(sig2type(tparams, skiptvs)) case '-' => val argTp = sig2type(tparams, skiptvs) @@ -412,29 +385,23 @@ class ClassfileParser( if (argTp.typeSymbol == defn.AnyClass) TypeBounds.upper(defn.FromJavaObjectType) else TypeBounds(argTp, defn.FromJavaObjectType) case '*' => TypeBounds.upper(defn.FromJavaObjectType) - } case _ => sig2type(tparams, skiptvs) - } if (argsBuf != null) argsBuf += arg - } accept('>') if (skiptvs) tp else AppliedType(tp, argsBuf.toList) - } else tp case tp => assert(sig(index) != '<', tp) tp - } val classSym = classNameToSymbol(subName(c => c == ';' || c == '<')) val classTpe = if (classSym eq defn.ObjectClass) defn.FromJavaObjectType else innerType(classSym) var tpe = processTypeArgs(classTpe) - while (sig(index) == '.') { + while (sig(index) == '.') accept('.') val name = subName(c => c == ';' || c == '<' || c == '.').toTypeName val tp = tpe.select(name) tpe = processTypeArgs(tp) - } accept(';') tpe case ARRAY_TAG => @@ -468,7 +435,7 @@ class ClassfileParser( var paramnames = new 
ListBuffer[TermName]() while !isMethodEnd(index) do paramnames += nme.syntheticParamName(paramtypes.length) - paramtypes += { + paramtypes `+=`: if isRepeatedParam(index) then index += 1 val elemType = sig2type(tparams, skiptvs = false) @@ -476,7 +443,6 @@ class ClassfileParser( defn.RepeatedParamType.appliedTo(elemType) else sig2type(tparams, skiptvs = false) - } index += 1 val restype = sig2type(tparams, skiptvs = false) @@ -488,51 +454,42 @@ class ClassfileParser( if (skiptvs) defn.AnyType else tparams(n).typeRef case tag => constantTagToType(tag) - } - } // sig2type(tparams, skiptvs) - def sig2typeBounds(tparams: immutable.Map[Name, Symbol], skiptvs: Boolean)(using Context): Type = { + def sig2typeBounds(tparams: immutable.Map[Name, Symbol], skiptvs: Boolean)(using Context): Type = val ts = new ListBuffer[Type] - while (sig(index) == ':') { + while (sig(index) == ':') index += 1 - if (sig(index) != ':') { // guard against empty class bound + if (sig(index) != ':') // guard against empty class bound val tp = sig2type(tparams, skiptvs) if (!skiptvs) ts += cook(tp) - } - } - if (!skiptvs) { + if (!skiptvs) val bound = if ts.isEmpty then defn.AnyType else ts.reduceLeft(AndType.apply) TypeBounds.upper(bound) - } else NoType - } var tparams = classTParams - def typeParamCompleter(start: Int) = new LazyType { - def complete(denot: SymDenotation)(using Context): Unit = { + def typeParamCompleter(start: Int) = new LazyType: + def complete(denot: SymDenotation)(using Context): Unit = val savedIndex = index - try { + try index = start denot.info = checkNonCyclic( // we need the checkNonCyclic call to insert LazyRefs for F-bounded cycles denot.symbol, sig2typeBounds(tparams, skiptvs = false), reportErrors = false) - } finally index = savedIndex - } - } val newTParams = new ListBuffer[Symbol]() - if (sig(index) == '<') { + if (sig(index) == '<') assert(owner != null) index += 1 val start = index - while (sig(index) != '>') { + while (sig(index) != '>') val tpname = 
subName(':'.==).toTypeName val s = newSymbol( owner, tpname, owner.typeParamCreationFlags, @@ -541,34 +498,28 @@ class ClassfileParser( tparams = tparams + (tpname -> s) sig2typeBounds(tparams, skiptvs = true) newTParams += s - } index += 1 - } val ownTypeParams = newTParams.toList.asInstanceOf[List[TypeSymbol]] val tpe = if ((owner == null) || !owner.isClass) sig2type(tparams, skiptvs = false) - else { + else classTParams = tparams val parents = new ListBuffer[Type]() while (index < end) parents += sig2type(tparams, skiptvs = false) // here the variance doesn't matter TempClassInfoType(parents.toList, instanceScope, owner) - } if (ownTypeParams.isEmpty) tpe else TempPolyType(ownTypeParams, tpe) - } // sigToType - class EnumTag(sig: String, name: NameOrString) { - def toTree(using ctx: Context): untpd.Tree = { + class EnumTag(sig: String, name: NameOrString): + def toTree(using ctx: Context): untpd.Tree = val enumClassTp = sigToType(sig) val enumModuleClass = enumClassTp.classSymbol.companionModule val tmref = TermRef(enumModuleClass.termRef, name.name) untpd.TypedSplice(ref(tmref)) - } - } - def parseAnnotArg(skip: Boolean = false)(using ctx: Context, in: DataReader): Option[untpd.Tree | EnumTag] = { + def parseAnnotArg(skip: Boolean = false)(using ctx: Context, in: DataReader): Option[untpd.Tree | EnumTag] = // If we encounter an empty array literal, we need the type of the corresponding // parameter to properly type it, but that would require forcing the annotation @@ -581,14 +532,13 @@ class ClassfileParser( val tag = in.nextByte.toChar val index = in.nextChar - tag match { + tag match case STRING_TAG => if (skip) None else Some(lit(Constant(pool.getName(index).value))) case BOOL_TAG | BYTE_TAG | CHAR_TAG | SHORT_TAG => - if (skip) None else { + if (skip) None else val constant = convertTo(pool.getConstant(index), constantTagToType(tag)) Some(lit(constant)) - } case INT_TAG | LONG_TAG | FLOAT_TAG | DOUBLE_TAG => if (skip) None else 
Some(lit(pool.getConstant(index))) case CLASS_TAG => @@ -601,28 +551,23 @@ class ClassfileParser( val arr = new ArrayBuffer[untpd.Tree]() var hasError = false for (i <- 0 until index) - parseAnnotArg(skip) match { + parseAnnotArg(skip) match case Some(c: untpd.Tree) => arr += c case Some(tag: EnumTag) => arr += tag.toTree case None => hasError = true - } if (hasError) None else if (skip) None - else { + else val elems = arr.toList Some(untpd.JavaSeqLiteral(elems, untpd.TypeTree())) - } case ANNOTATION_TAG => parseAnnotation(index, skip).map(_.untpdTree) - } - } - class ClassfileAnnotation(annotType: Type, lazyArgs: List[(NameOrString, untpd.Tree | EnumTag)]) extends LazyAnnotation { + class ClassfileAnnotation(annotType: Type, lazyArgs: List[(NameOrString, untpd.Tree | EnumTag)]) extends LazyAnnotation: private def args(using Context): List[untpd.Tree] = - lazyArgs.map { + lazyArgs.map: case (name, tree: untpd.Tree) => untpd.NamedArg(name.name, tree).withSpan(NoSpan) case (name, tag: EnumTag) => untpd.NamedArg(name.name, tag.toTree).withSpan(NoSpan) - } protected var mySym: Symbol | (Context ?=> Symbol) = (ctx: Context) ?=> annotType.classSymbol @@ -632,26 +577,23 @@ class ClassfileParser( def untpdTree(using Context): untpd.Tree = untpd.New(untpd.TypeTree(annotType), List(args)) - } /** Parse and return a single annotation. If it is malformed, * return None. 
*/ - def parseAnnotation(attrNameIndex: Char, skip: Boolean = false)(using ctx: Context, in: DataReader): Option[ClassfileAnnotation] = try { + def parseAnnotation(attrNameIndex: Char, skip: Boolean = false)(using ctx: Context, in: DataReader): Option[ClassfileAnnotation] = try val attrType = pool.getType(attrNameIndex.toInt) val nargs = in.nextChar.toInt val argbuf = new ListBuffer[(NameOrString, untpd.Tree | EnumTag)] var hasError = false - for (i <- 0 until nargs) { + for (i <- 0 until nargs) val name = pool.getName(in.nextChar) - parseAnnotArg(skip) match { + parseAnnotArg(skip) match case Some(arg) => argbuf += name -> arg case None => hasError = !skip - } - } attrType match case tp: TypeRef if tp.denot.infoOrCompleter.isInstanceOf[StubInfo] => // Silently ignore missing annotation classes like javac @@ -661,8 +603,7 @@ class ClassfileParser( case _ => if (hasError || skip) None else Some(ClassfileAnnotation(attrType, argbuf.toList)) - } - catch { + catch case f: FatalError => throw f // don't eat fatal errors, they mean a class was not found case NonFatal(ex) => // We want to be robust when annotations are unavailable, so the very least @@ -675,7 +616,6 @@ class ClassfileParser( if (ctx.debug) ex.printStackTrace() None // ignore malformed annotations - } /** A completer for attributes * @@ -685,21 +625,20 @@ class ClassfileParser( * contain references to the constant pool, where the constants are loaded * lazily. 
*/ - class AttributeCompleter(sym: Symbol) { + class AttributeCompleter(sym: Symbol): var sig: String = null var constant: Constant = null var exceptions: List[NameOrString] = Nil var annotations: List[Annotation] = Nil var namedParams: Map[Int, TermName] = Map.empty - def complete(tp: Type, isVarargs: Boolean = false)(using Context): Type = { + def complete(tp: Type, isVarargs: Boolean = false)(using Context): Type = val updatedType = if sig == null then tp - else { + else val newType = sigToType(sig, sym, isVarargs) if (ctx.debug && ctx.verbose) println("" + sym + "; signature = " + sig + " type = " + newType) newType - } val newType = if this.constant != null then @@ -722,17 +661,15 @@ class ClassfileParser( case _ => t cook.apply(fillInParamNames(newType)) - } - } - def parseAttributes(sym: Symbol)(using ctx: Context, in: DataReader): AttributeCompleter = { + def parseAttributes(sym: Symbol)(using ctx: Context, in: DataReader): AttributeCompleter = val res = new AttributeCompleter(sym) - def parseAttribute(): Unit = { + def parseAttribute(): Unit = val attrName = pool.getName(in.nextChar).name.toTypeName val attrLen = in.nextInt val end = in.bp + attrLen - attrName match { + attrName match case tpnme.SignatureATTR => val sig = pool.getExternalName(in.nextChar) res.sig = sig.value @@ -746,9 +683,8 @@ class ClassfileParser( case tpnme.DeprecatedATTR => val msg = Literal(Constant("see corresponding Javadoc for more information.")) val since = Literal(Constant("")) - res.annotations ::= Annotation.deferredSymAndTree(defn.DeprecatedAnnot) { + res.annotations ::= Annotation.deferredSymAndTree(defn.DeprecatedAnnot): New(defn.DeprecatedAnnot.typeRef, msg :: since :: Nil) - } case tpnme.ConstantValueATTR => val c = pool.getConstant(in.nextChar) @@ -785,49 +721,41 @@ class ClassfileParser( case tpnme.CodeATTR => in.skip(attrLen) // flag test will trigger completion and cycles, thus have to be lazy - if (sym.owner.flagsUNSAFE.isAllOf(Flags.JavaInterface)) { + if 
(sym.owner.flagsUNSAFE.isAllOf(Flags.JavaInterface)) sym.resetFlag(Flags.Deferred) sym.owner.resetFlag(Flags.PureInterface) report.log(s"$sym in ${sym.owner} is a java 8+ default method.") - } case _ => - } in.bp = end - } /** * Parse the "Exceptions" attribute which denotes the exceptions * thrown by a method. */ - def parseExceptions(len: Int): Unit = { + def parseExceptions(len: Int): Unit = val nClasses = in.nextChar - for (n <- 0 until nClasses) { + for (n <- 0 until nClasses) // FIXME: this performs an equivalent of getExceptionTypes instead of getGenericExceptionTypes (SI-7065) val cls = pool.getClassName(in.nextChar.toInt) res.exceptions ::= cls - } - } /** Parse a sequence of annotations and attaches them to the * current symbol sym, except for the ScalaSignature annotation that it returns, if it is available. */ - def parseAnnotations(len: Int): Unit = { + def parseAnnotations(len: Int): Unit = val nAttr = in.nextChar for (n <- 0 until nAttr) - parseAnnotation(in.nextChar) match { + parseAnnotation(in.nextChar) match case Some(annot) => sym.addAnnotation(annot) case None => - } - } // begin parseAttributes for (i <- 0 until in.nextChar) parseAttribute() res - } /** Annotations in Scala are assumed to get all their arguments as constructor * parameters. For Java annotations we need to fake it by making up the constructor. 
@@ -840,19 +768,17 @@ class ClassfileParser( info = new AnnotConstructorCompleter(classInfo) ).entered - class AnnotConstructorCompleter(classInfo: TempClassInfoType) extends LazyType { - def complete(denot: SymDenotation)(using Context): Unit = { + class AnnotConstructorCompleter(classInfo: TempClassInfoType) extends LazyType: + def complete(denot: SymDenotation)(using Context): Unit = val attrs = classInfo.decls.toList.filter(sym => sym.isTerm && sym != denot.symbol && sym.name != nme.CONSTRUCTOR) val paramNames = attrs.map(_.name.asTermName) val paramTypes = attrs.map(_.info.resultType) denot.info = MethodType(paramNames, paramTypes, classRoot.typeRef) - } - } /** Enter own inner classes in the right scope. It needs the scopes to be set up, * and implicitly current class' superclasses. */ - private def enterOwnInnerClasses()(using Context, DataReader): Unit = { + private def enterOwnInnerClasses()(using Context, DataReader): Unit = def enterClassAndModule(entry: InnerClassEntry, file: AbstractFile, jflags: Int) = SymbolLoaders.enterClassAndModule( getOwner(jflags), @@ -864,11 +790,9 @@ class ClassfileParser( for entry <- innerClasses.valuesIterator do // create a new class member for immediate inner classes if entry.outer.name == currentClassName then - val file = ctx.platform.classPath.findClassFile(entry.externalName.toString) getOrElse { + val file = ctx.platform.classPath.findClassFile(entry.externalName.toString) getOrElse: throw new AssertionError(entry.externalName) - } enterClassAndModule(entry, file, entry.jflags) - } // Nothing$ and Null$ were incorrectly emitted with a Scala attribute // instead of ScalaSignature before 2.13.0-M2, see https://github.com/scala/scala/pull/5952 @@ -878,123 +802,107 @@ class ClassfileParser( * Restores the old `bp`. * @return Some(unpickler) iff classfile is from Scala, so no Java info needs to be read. 
*/ - def unpickleOrParseInnerClasses()(using ctx: Context, in: DataReader): Option[Embedded] = { + def unpickleOrParseInnerClasses()(using ctx: Context, in: DataReader): Option[Embedded] = val oldbp = in.bp - try { + try skipSuperclasses() skipMembers() // fields skipMembers() // methods val attrs = in.nextChar val attrbp = in.bp - def scan(target: TypeName): Boolean = { + def scan(target: TypeName): Boolean = in.bp = attrbp var i = 0 - while (i < attrs && pool.getName(in.nextChar).name.toTypeName != target) { + while (i < attrs && pool.getName(in.nextChar).name.toTypeName != target) val attrLen = in.nextInt in.skip(attrLen) i += 1 - } i < attrs - } - def unpickleScala(bytes: Array[Byte]): Some[Embedded] = { + def unpickleScala(bytes: Array[Byte]): Some[Embedded] = val allowed = ctx.settings.Yscala2Unpickler.value def failUnless(cond: Boolean) = assert(cond, s"Unpickling ${classRoot.symbol.showLocated} from ${classRoot.symbol.associatedFile} is not allowed with -Yscala2-unpickler $allowed") - if (allowed != "always") { + if (allowed != "always") failUnless(allowed != "never") val allowedList = allowed.split(java.io.File.pathSeparator).toList val file = classRoot.symbol.associatedFile // Using `.toString.contains` isn't great, but it's good enough for a debug flag. 
failUnless(file == null || allowedList.exists(path => file.toString.contains(path))) - } val unpickler = new unpickleScala2.Scala2Unpickler(bytes, classRoot, moduleRoot)(ctx) withMode(Scala2UnpicklingMode)(unpickler.run()) Some(unpickler) - } - def unpickleTASTY(bytes: Array[Byte]): Some[Embedded] = { + def unpickleTASTY(bytes: Array[Byte]): Some[Embedded] = val unpickler = new tasty.DottyUnpickler(bytes) unpickler.enter(roots = Set(classRoot, moduleRoot, moduleRoot.sourceModule))(using ctx.withSource(util.NoSource)) Some(unpickler) - } - def parseScalaSigBytes: Array[Byte] = { + def parseScalaSigBytes: Array[Byte] = val tag = in.nextByte.toChar assert(tag == STRING_TAG, tag) pool getBytes in.nextChar - } - def parseScalaLongSigBytes: Array[Byte] = { + def parseScalaLongSigBytes: Array[Byte] = val tag = in.nextByte.toChar assert(tag == ARRAY_TAG, tag) val stringCount = in.nextChar val entries = - for (i <- 0 until stringCount) yield { + for (i <- 0 until stringCount) yield val stag = in.nextByte.toChar assert(stag == STRING_TAG, stag) in.nextChar.toInt - } pool.getBytes(entries.toList) - } - if (scan(tpnme.TASTYATTR)) { + if (scan(tpnme.TASTYATTR)) val attrLen = in.nextInt val bytes = in.nextBytes(attrLen) - if (attrLen == 16) { // A tasty attribute with that has only a UUID (16 bytes) implies the existence of the .tasty file - val tastyBytes: Array[Byte] = classfile match { // TODO: simplify when #3552 is fixed + if (attrLen == 16) // A tasty attribute with that has only a UUID (16 bytes) implies the existence of the .tasty file + val tastyBytes: Array[Byte] = classfile match // TODO: simplify when #3552 is fixed case classfile: io.ZipArchive#Entry => // We are in a jar val path = classfile.parent.lookupName( classfile.name.stripSuffix(".class") + ".tasty", directory = false ) - if (path != null) { + if (path != null) val stream = path.input - try { + try val tastyOutStream = new ByteArrayOutputStream() val buffer = new Array[Byte](1024) var read = 
stream.read(buffer, 0, buffer.length) - while (read != -1) { + while (read != -1) tastyOutStream.write(buffer, 0, read) read = stream.read(buffer, 0, buffer.length) - } tastyOutStream.flush() tastyOutStream.toByteArray - } finally { + finally stream.close() - } - } - else { + else report.error(em"Could not find $path in ${classfile.underlyingSource}") Array.empty - } case _ => val dir = classfile.container val name = classfile.name.stripSuffix(".class") + ".tasty" val tastyFileOrNull = dir.lookupName(name, false) - if (tastyFileOrNull == null) { + if (tastyFileOrNull == null) report.error(em"Could not find TASTY file $name under $dir") Array.empty - } else + else tastyFileOrNull.toByteArray - } - if (tastyBytes.nonEmpty) { + if (tastyBytes.nonEmpty) val reader = new TastyReader(bytes, 0, 16) val expectedUUID = new UUID(reader.readUncompressedLong(), reader.readUncompressedLong()) val tastyUUID = new TastyHeaderUnpickler(tastyBytes).readHeader() if (expectedUUID != tastyUUID) report.warning(s"$classfile is out of sync with its TASTy file. Loaded TASTy file. Try cleaning the project to fix this issue", NoSourcePosition) return unpickleTASTY(tastyBytes) - } - } else // Before 3.0.0 we had a mode where we could embed the TASTY bytes in the classfile. This has not been supported in any stable release. report.error(s"Found a TASTY attribute with a length different from 16 in $classfile. This is likely a bug in the compiler. Please report.", NoSourcePosition) - } if scan(tpnme.ScalaATTR) && !scalaUnpickleWhitelist.contains(classRoot.name) && !(classRoot.name.startsWith("Tuple") && classRoot.name.endsWith("$sp")) @@ -1010,91 +918,78 @@ class ClassfileParser( // attribute isn't, this classfile is a compilation artifact. 
return Some(NoEmbedded) - if (scan(tpnme.ScalaSignatureATTR) && scan(tpnme.RuntimeVisibleAnnotationATTR)) { + if (scan(tpnme.ScalaSignatureATTR) && scan(tpnme.RuntimeVisibleAnnotationATTR)) val attrLen = in.nextInt val nAnnots = in.nextChar var i = 0 - while (i < nAnnots) { + while (i < nAnnots) val attrClass = pool.getType(in.nextChar).typeSymbol val nArgs = in.nextChar var j = 0 - while (j < nArgs) { + while (j < nArgs) val argName = pool.getName(in.nextChar) - if (argName.name == nme.bytes) { + if (argName.name == nme.bytes) if attrClass == defn.ScalaSignatureAnnot then return unpickleScala(parseScalaSigBytes) else if attrClass == defn.ScalaLongSignatureAnnot then return unpickleScala(parseScalaLongSigBytes) - } parseAnnotArg(skip = true) j += 1 - } i += 1 - } - } - if (scan(tpnme.InnerClassesATTR)) { + if (scan(tpnme.InnerClassesATTR)) val attrLen = in.nextInt val entries = in.nextChar.toInt - for (i <- 0 until entries) { + for (i <- 0 until entries) val innerIndex = in.nextChar val outerIndex = in.nextChar val nameIndex = in.nextChar val jflags = in.nextChar - if (innerIndex != 0 && outerIndex != 0 && nameIndex != 0) { + if (innerIndex != 0 && outerIndex != 0 && nameIndex != 0) val inner = pool.getClassName(innerIndex) val outer = pool.getClassName(outerIndex) val name = pool.getName(nameIndex) val entry = InnerClassEntry(inner, outer, name, jflags) innerClasses(inner.value) = entry - } - } - } None - } finally in.bp = oldbp - } /** An entry in the InnerClasses attribute of this class file. */ - case class InnerClassEntry(external: NameOrString, outer: NameOrString, name: NameOrString, jflags: Int) { + case class InnerClassEntry(external: NameOrString, outer: NameOrString, name: NameOrString, jflags: Int): def externalName = external.value def outerName = outer.value def originalName = name.name // The name of the outer class, without its trailing $ if it has one. 
def strippedOuter = outer.name.stripModuleClassSuffix - } - private object innerClasses extends util.HashMap[String, InnerClassEntry] { + private object innerClasses extends util.HashMap[String, InnerClassEntry]: /** Return the Symbol of the top level class enclosing `name`, * or 'name's symbol if no entry found for `name`. */ - def topLevelClass(name: String)(using Context): Symbol = { - val tlName = if (contains(name)) { + def topLevelClass(name: String)(using Context): Symbol = + val tlName = if (contains(name)) var entry = this(name) while (contains(entry.outerName)) entry = this(entry.outerName) entry.outerName - } else name classNameToSymbol(tlName.toTypeName) - } /** Return the class symbol for `entry`. It looks it up in its outer class. * This might force outer class symbols. */ - def classSymbol(entry: InnerClassEntry)(using Context): Symbol = { + def classSymbol(entry: InnerClassEntry)(using Context): Symbol = def getMember(sym: Symbol, name: Name)(using Context): Symbol = if (isStatic(entry.jflags)) if (sym == classRoot.symbol) staticScope.lookup(name) - else { + else var moduleClass = sym.registeredCompanion if (!moduleClass.exists && sym.isAbsent()) moduleClass = sym.scalacLinkedClass moduleClass.info.member(name).symbol - } else if (sym == classRoot.symbol) instanceScope.lookup(name) else if (sym == classRoot.symbol.owner && name == classRoot.name) @@ -1123,28 +1018,21 @@ class ClassfileParser( |owner.fullName = ${owner.showFullName} |while parsing ${classfile}""") result - } - } - def skipAttributes()(using in: DataReader): Unit = { + def skipAttributes()(using in: DataReader): Unit = val attrCount = in.nextChar - for (i <- 0 until attrCount) { + for (i <- 0 until attrCount) in.skip(2); in.skip(in.nextInt) - } - } - def skipMembers()(using in: DataReader): Unit = { + def skipMembers()(using in: DataReader): Unit = val memberCount = in.nextChar - for (i <- 0 until memberCount) { + for (i <- 0 until memberCount) in.skip(6); skipAttributes() - } - } - 
def skipSuperclasses()(using in: DataReader): Unit = { + def skipSuperclasses()(using in: DataReader): Unit = in.skip(2) // superclass val ifaces = in.nextChar in.skip(2 * ifaces) - } protected def getOwner(flags: Int): Symbol = if (isStatic(flags)) moduleRoot.symbol else classRoot.symbol @@ -1162,13 +1050,11 @@ class ClassfileParser( private def isStatic(flags: Int) = (flags & JAVA_ACC_STATIC) != 0 private def hasAnnotation(flags: Int) = (flags & JAVA_ACC_ANNOTATION) != 0 - protected class NameOrString(val value: String) { + protected class NameOrString(val value: String): private var _name: SimpleName = null - def name: SimpleName = { + def name: SimpleName = if (_name eq null) _name = termName(value) _name - } - } def getClassSymbol(name: SimpleName)(using Context): Symbol = if (name.endsWith("$") && (name ne nme.nothingRuntimeClass) && (name ne nme.nullRuntimeClass)) @@ -1176,17 +1062,17 @@ class ClassfileParser( requiredModule(name.dropRight(1)) else classNameToSymbol(name) - class ConstantPool(using in: DataReader) { + class ConstantPool(using in: DataReader): private val len = in.nextChar private val starts = new Array[Int](len) private val values = new Array[AnyRef](len) private val internalized = new Array[NameOrString](len) { var i = 1 - while (i < starts.length) { + while (i < starts.length) starts(i) = in.bp i += 1 - (in.nextByte.toInt: @switch) match { + (in.nextByte.toInt: @switch) match case CONSTANT_UTF8 | CONSTANT_UNICODE => in.skip(in.nextChar) case CONSTANT_CLASS | CONSTANT_STRING | CONSTANT_METHODTYPE => @@ -1202,16 +1088,14 @@ class ClassfileParser( i += 1 case _ => errorBadTag(in.bp - 1) - } - } } /** Return the name found at given index. 
*/ - def getName(index: Int)(using in: DataReader): NameOrString = { + def getName(index: Int)(using in: DataReader): NameOrString = if (index <= 0 || len <= index) errorBadIndex(index) - values(index) match { + values(index) match case name: NameOrString => name case null => val start = starts(index) @@ -1220,11 +1104,9 @@ class ClassfileParser( val name = new NameOrString(in.getUTF(start + 1, len + 2)) values(index) = name name - } - } /** Return the name found at given index in the constant pool, with '/' replaced by '.'. */ - def getExternalName(index: Int)(using in: DataReader): NameOrString = { + def getExternalName(index: Int)(using in: DataReader): NameOrString = if (index <= 0 || len <= index) errorBadIndex(index) @@ -1232,73 +1114,63 @@ class ClassfileParser( internalized(index) = new NameOrString(getName(index).value.replace('/', '.')) internalized(index) - } - def getClassSymbol(index: Int)(using ctx: Context, in: DataReader): Symbol = { + def getClassSymbol(index: Int)(using ctx: Context, in: DataReader): Symbol = if (index <= 0 || len <= index) errorBadIndex(index) var c = values(index).asInstanceOf[Symbol] - if (c eq null) { + if (c eq null) val start = starts(index) if (in.getByte(start).toInt != CONSTANT_CLASS) errorBadTag(start) val name = getExternalName(in.getChar(start + 1)) c = ClassfileParser.this.getClassSymbol(name.name) values(index) = c - } c - } /** Return the external name of the class info structure found at 'index'. * Use 'getClassSymbol' if the class is sure to be a top-level class. */ - def getClassName(index: Int)(using in: DataReader): NameOrString = { + def getClassName(index: Int)(using in: DataReader): NameOrString = val start = starts(index) if (in.getByte(start).toInt != CONSTANT_CLASS) errorBadTag(start) getExternalName(in.getChar(start + 1)) - } /** Return the type of a class constant entry. Since * arrays are considered to be class types, they might * appear as entries in 'newarray' or 'cast' opcodes. 
*/ - def getClassOrArrayType(index: Int)(using ctx: Context, in: DataReader): Type = { + def getClassOrArrayType(index: Int)(using ctx: Context, in: DataReader): Type = if (index <= 0 || len <= index) errorBadIndex(index) val value = values(index) var c: Type = null - if (value eq null) { + if (value eq null) val start = starts(index) if (in.getByte(start).toInt != CONSTANT_CLASS) errorBadTag(start) val name = getExternalName(in.getChar(start + 1)) - if (name.value.charAt(0) == ARRAY_TAG) { + if (name.value.charAt(0) == ARRAY_TAG) c = sigToType(name.value) values(index) = c - } - else { + else val sym = classNameToSymbol(name.name) values(index) = sym c = sym.typeRef - } - } - else c = value match { + else c = value match case tp: Type => tp case cls: Symbol => cls.typeRef - } c - } def getType(index: Int, isVarargs: Boolean = false)(using Context, DataReader): Type = sigToType(getExternalName(index).value, isVarargs = isVarargs) - def getSuperClass(index: Int)(using Context, DataReader): Symbol = { + def getSuperClass(index: Int)(using Context, DataReader): Symbol = assert(index != 0, "attempt to parse java.lang.Object from classfile") getClassSymbol(index) - } - def getConstant(index: Int)(using ctx: Context, in: DataReader): Constant = { + def getConstant(index: Int)(using ctx: Context, in: DataReader): Constant = if (index <= 0 || len <= index) errorBadIndex(index) var value = values(index) - if (value eq null) { + if (value eq null) val start = starts(index) - value = (in.getByte(start).toInt: @switch) match { + value = (in.getByte(start).toInt: @switch) match case CONSTANT_STRING => Constant(getName(in.getChar(start + 1).toInt).value) case CONSTANT_INTEGER => @@ -1313,27 +1185,22 @@ class ClassfileParser( getClassOrArrayType(index).typeSymbol case _ => errorBadTag(start) - } values(index) = value - } - value match { + value match case ct: Constant => ct case cls: Symbol => Constant(cls.typeRef) case arr: Type => Constant(arr) - } - } - private def 
getSubArray(bytes: Array[Byte]): Array[Byte] = { + private def getSubArray(bytes: Array[Byte]): Array[Byte] = val decodedLength = ByteCodecs.decode(bytes) val arr = new Array[Byte](decodedLength) System.arraycopy(bytes, 0, arr, 0, decodedLength) arr - } - def getBytes(index: Int)(using in: DataReader): Array[Byte] = { + def getBytes(index: Int)(using in: DataReader): Array[Byte] = if (index <= 0 || len <= index) errorBadIndex(index) var value = values(index).asInstanceOf[Array[Byte]] - if (value eq null) { + if (value eq null) val start = starts(index) if (in.getByte(start).toInt != CONSTANT_UTF8) errorBadTag(start) val len = in.getChar(start + 1) @@ -1341,16 +1208,14 @@ class ClassfileParser( in.getBytes(start + 3, bytes) value = getSubArray(bytes) values(index) = value - } value - } - def getBytes(indices: List[Int])(using in: DataReader): Array[Byte] = { + def getBytes(indices: List[Int])(using in: DataReader): Array[Byte] = assert(!indices.isEmpty, indices) var value = values(indices.head).asInstanceOf[Array[Byte]] - if (value eq null) { + if (value eq null) val bytesBuffer = ArrayBuffer.empty[Byte] - for (index <- indices) { + for (index <- indices) if (index <= 0 || ConstantPool.this.len <= index) errorBadIndex(index) val start = starts(index) if (in.getByte(start).toInt != CONSTANT_UTF8) errorBadTag(start) @@ -1358,12 +1223,9 @@ class ClassfileParser( val buf = new Array[Byte](len) in.getBytes(start + 3, buf) bytesBuffer ++= buf - } value = getSubArray(bytesBuffer.toArray) values(indices.head) = value - } value - } /** Throws an exception signaling a bad constant index. */ private def errorBadIndex(index: Int)(using in: DataReader) = @@ -1372,5 +1234,3 @@ class ClassfileParser( /** Throws an exception signaling a bad tag at given address. 
*/ private def errorBadTag(start: Int)(using in: DataReader) = throw new RuntimeException("bad constant pool tag " + in.getByte(start) + " at byte " + start) - } -} diff --git a/compiler/src/dotty/tools/dotc/core/classfile/DataReader.scala b/compiler/src/dotty/tools/dotc/core/classfile/DataReader.scala index d96bebf25fb7..f2edcbc62440 100644 --- a/compiler/src/dotty/tools/dotc/core/classfile/DataReader.scala +++ b/compiler/src/dotty/tools/dotc/core/classfile/DataReader.scala @@ -3,7 +3,7 @@ package dotc package core package classfile -trait DataReader { +trait DataReader: def bp: Int def bp_=(i: Int): Unit @@ -58,4 +58,3 @@ trait DataReader { /** skip next 'n' bytes */ def skip(n: Int): Unit -} diff --git a/compiler/src/dotty/tools/dotc/core/classfile/ReusableDataReader.scala b/compiler/src/dotty/tools/dotc/core/classfile/ReusableDataReader.scala index eb1649091f77..ccf5c338873a 100644 --- a/compiler/src/dotty/tools/dotc/core/classfile/ReusableDataReader.scala +++ b/compiler/src/dotty/tools/dotc/core/classfile/ReusableDataReader.scala @@ -8,143 +8,116 @@ import scala.language.unsafeNulls import java.io.{DataInputStream, InputStream} import java.nio.{BufferUnderflowException, ByteBuffer} -final class ReusableDataReader() extends DataReader { +final class ReusableDataReader() extends DataReader: private[this] var data = new Array[Byte](32768) private[this] var bb: ByteBuffer = ByteBuffer.wrap(data) private[this] var size = 0 - private[this] val reader: DataInputStream = { - val stream = new InputStream { - override def read(): Int = try { + private[this] val reader: DataInputStream = + val stream = new InputStream: + override def read(): Int = try bb.get & 0xff - } catch { + catch case _: BufferUnderflowException => -1 - } - override def read(b: Array[Byte], off: Int, len: Int): Int = { + override def read(b: Array[Byte], off: Int, len: Int): Int = val pos = bb.position() bb.get(b, off, len) bb.position() - pos - } override def markSupported(): Boolean = false - } 
new DataInputStream(stream) - } def buf: Array[Byte] = data private def nextPositivePowerOfTwo(target: Int): Int = 1 << -Integer.numberOfLeadingZeros(target - 1) - def reset(file: dotty.tools.io.AbstractFile): this.type = { + def reset(file: dotty.tools.io.AbstractFile): this.type = this.size = 0 - file.sizeOption match { + file.sizeOption match case Some(size) => - if (size > data.length) { + if (size > data.length) data = new Array[Byte](nextPositivePowerOfTwo(size)) - } else { + else java.util.Arrays.fill(data, 0.toByte) - } val input = file.input - try { + try var endOfInput = false - while (!endOfInput) { + while (!endOfInput) val remaining = data.length - this.size if (remaining == 0) endOfInput = true - else { + else val read = input.read(data, this.size, remaining) if (read < 0) endOfInput = true else this.size += read - } - } bb = ByteBuffer.wrap(data, 0, size) - } finally { + finally input.close() - } case None => val input = file.input - try { + try var endOfInput = false - while (!endOfInput) { + while (!endOfInput) val remaining = data.length - size - if (remaining == 0) { + if (remaining == 0) data = java.util.Arrays.copyOf(data, nextPositivePowerOfTwo(size)) - } val read = input.read(data, this.size, data.length - this.size) if (read < 0) endOfInput = true else this.size += read - } bb = ByteBuffer.wrap(data, 0, size) - } finally { + finally input.close() - } - } this - } @throws(classOf[IndexOutOfBoundsException]) def nextByte: Byte = bb.get - def nextBytes(len: Int): Array[Byte] = { + def nextBytes(len: Int): Array[Byte] = val result = new Array[Byte](len) reader.readFully(result) result - } def nextChar: Char = bb.getChar() def nextInt: Int = bb.getInt() - def getChar(mybp: Int): Char = { + def getChar(mybp: Int): Char = bb.getChar(mybp) - } - def getInt(mybp: Int): Int = { + def getInt(mybp: Int): Int = bb.getInt(mybp) - } - def getLong(mybp: Int): Long = { + def getLong(mybp: Int): Long = bb.getLong(mybp) - } - def getFloat(mybp: Int): Float = { 
+ def getFloat(mybp: Int): Float = bb.getFloat(mybp) - } - def getDouble(mybp: Int): Double = { + def getDouble(mybp: Int): Double = bb.getDouble(mybp) - } - def skip(n: Int): Unit = { + def skip(n: Int): Unit = bb.position(bb.position() + n) - } def bp: Int = bb.position() - def bp_=(i: Int): Unit = { - try { + def bp_=(i: Int): Unit = + try bb.position(i) - } catch { + catch case ex: IllegalArgumentException => throw ex - } - } - def getByte(mybp: Int): Byte = { + def getByte(mybp: Int): Byte = bb.get(mybp) - } - def getBytes(mybp: Int, bytes: Array[Byte]): Unit = { + def getBytes(mybp: Int, bytes: Array[Byte]): Unit = val saved = bb.position() bb.position(mybp) try reader.readFully(bytes) finally bb.position(saved) - } - def getUTF(mybp: Int, len: Int): String = { + def getUTF(mybp: Int, len: Int): String = val saved = bb.position() val savedLimit = bb.limit() bb.position(mybp) bb.limit(mybp + len) try reader.readUTF() - finally { + finally bb.limit(savedLimit) bb.position(saved) - } - } -} diff --git a/compiler/src/dotty/tools/dotc/core/tasty/CommentUnpickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/CommentUnpickler.scala index eb0d140df51e..dcbd8442bc1b 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/CommentUnpickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/CommentUnpickler.scala @@ -12,24 +12,20 @@ import TastyBuffer.Addr import java.nio.charset.StandardCharsets -class CommentUnpickler(reader: TastyReader) { +class CommentUnpickler(reader: TastyReader): import reader._ - private[tasty] lazy val comments: HashMap[Addr, Comment] = { + private[tasty] lazy val comments: HashMap[Addr, Comment] = val comments = new HashMap[Addr, Comment] - while (!isAtEnd) { + while (!isAtEnd) val addr = readAddr() val length = readNat() - if (length > 0) { + if (length > 0) val bytes = readBytes(length) val position = new Span(readLongInt()) val rawComment = new String(bytes, StandardCharsets.UTF_8) comments(addr) = Comment(position, rawComment) - } - } 
comments - } def commentAt(addr: Addr): Option[Comment] = comments.get(addr) -} diff --git a/compiler/src/dotty/tools/dotc/core/tasty/DottyUnpickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/DottyUnpickler.scala index b35c5c9f1acc..2ad70492f805 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/DottyUnpickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/DottyUnpickler.scala @@ -15,33 +15,29 @@ import TreeUnpickler.UnpickleMode import dotty.tools.tasty.TastyReader import dotty.tools.tasty.TastyFormat.{ASTsSection, PositionsSection, CommentsSection} -object DottyUnpickler { +object DottyUnpickler: /** Exception thrown if classfile is corrupted */ class BadSignature(msg: String) extends RuntimeException(msg) class TreeSectionUnpickler(posUnpickler: Option[PositionUnpickler], commentUnpickler: Option[CommentUnpickler]) - extends SectionUnpickler[TreeUnpickler](ASTsSection) { + extends SectionUnpickler[TreeUnpickler](ASTsSection): def unpickle(reader: TastyReader, nameAtRef: NameTable): TreeUnpickler = new TreeUnpickler(reader, nameAtRef, posUnpickler, commentUnpickler) - } - class PositionsSectionUnpickler extends SectionUnpickler[PositionUnpickler](PositionsSection) { + class PositionsSectionUnpickler extends SectionUnpickler[PositionUnpickler](PositionsSection): def unpickle(reader: TastyReader, nameAtRef: NameTable): PositionUnpickler = new PositionUnpickler(reader, nameAtRef) - } - class CommentsSectionUnpickler extends SectionUnpickler[CommentUnpickler](CommentsSection) { + class CommentsSectionUnpickler extends SectionUnpickler[CommentUnpickler](CommentsSection): def unpickle(reader: TastyReader, nameAtRef: NameTable): CommentUnpickler = new CommentUnpickler(reader) - } -} /** A class for unpickling Tasty trees and symbols. 
* @param bytes the bytearray containing the Tasty file from which we unpickle * @param mode the tasty file contains package (TopLevel), an expression (Term) or a type (TypeTree) */ -class DottyUnpickler(bytes: Array[Byte], mode: UnpickleMode = UnpickleMode.TopLevel) extends ClassfileParser.Embedded with tpd.TreeProvider { +class DottyUnpickler(bytes: Array[Byte], mode: UnpickleMode = UnpickleMode.TopLevel) extends ClassfileParser.Embedded with tpd.TreeProvider: import tpd._ import DottyUnpickler._ @@ -63,12 +59,10 @@ class DottyUnpickler(bytes: Array[Byte], mode: UnpickleMode = UnpickleMode.TopLe private var ids: Array[String] = null - override def mightContain(id: String)(using Context): Boolean = { + override def mightContain(id: String)(using Context): Boolean = if (ids == null) ids = unpickler.nameAtRef.contents.toArray.collect { case name: SimpleName => name.toString }.sorted ids.binarySearch(id) >= 0 - } -} diff --git a/compiler/src/dotty/tools/dotc/core/tasty/NameBuffer.scala b/compiler/src/dotty/tools/dotc/core/tasty/NameBuffer.scala index 1ddcf9afe1dc..fb5b331bfbaa 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/NameBuffer.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/NameBuffer.scala @@ -13,27 +13,26 @@ import NameOps._ import scala.io.Codec import NameTags.{SIGNED, TARGETSIGNED} -class NameBuffer extends TastyBuffer(10000) { +class NameBuffer extends TastyBuffer(10000): import NameBuffer._ private val nameRefs = new mutable.LinkedHashMap[Name, NameRef] - def nameIndex(name: Name): NameRef = { + def nameIndex(name: Name): NameRef = val name1 = name.toTermName - nameRefs.get(name1) match { + nameRefs.get(name1) match case Some(ref) => ref case None => - name1 match { + name1 match case SignedName(original, Signature(params, result), target) => nameIndex(original) if !original.matchesTargetName(target) then nameIndex(target) nameIndex(result) - params.foreach { + params.foreach: case param: TypeName => nameIndex(param) case _ => - } case 
AnyQualifiedName(prefix, name) => nameIndex(prefix); nameIndex(name) case AnyUniqueName(original, separator, num) => @@ -42,14 +41,11 @@ class NameBuffer extends TastyBuffer(10000) { case DerivedName(original, _) => nameIndex(original) case _ => - } val ref = NameRef(nameRefs.size) nameRefs(name1) = ref ref - } - } - private inline def withLength(inline op: Unit, lengthWidth: Int = 1): Unit = { + private inline def withLength(inline op: Unit, lengthWidth: Int = 1): Unit = val lengthAddr = currentAddr var i = 0 while i < lengthWidth do @@ -58,24 +54,21 @@ class NameBuffer extends TastyBuffer(10000) { op val length = currentAddr.index - lengthAddr.index - lengthWidth putNat(lengthAddr, length, lengthWidth) - } def writeNameRef(ref: NameRef): Unit = writeNat(ref.index) def writeNameRef(name: Name): Unit = writeNameRef(nameRefs(name.toTermName)) - def writeParamSig(paramSig: Signature.ParamSig): Unit ={ - val encodedValue = paramSig match { + def writeParamSig(paramSig: Signature.ParamSig): Unit = + val encodedValue = paramSig match case paramSig: TypeName => nameRefs(paramSig.toTermName).index case paramSig: Int => -paramSig - } writeInt(encodedValue) - } - def pickleNameContents(name: Name): Unit = { + def pickleNameContents(name: Name): Unit = val tag = name.toTermName.info.kind.tag - name.toTermName match { + name.toTermName match case name: SimpleName => writeByte(tag) val bytes = @@ -88,11 +81,10 @@ class NameBuffer extends TastyBuffer(10000) { withLength { writeNameRef(prefix); writeNameRef(name) } case AnyUniqueName(original, separator, num) => writeByte(tag) - withLength { + withLength: writeNameRef(separator) writeNat(num) if (!original.isEmpty) writeNameRef(original) - } case AnyNumberedName(original, num) => writeByte(tag) withLength { writeNameRef(original); writeNat(num) } @@ -109,21 +101,16 @@ class NameBuffer extends TastyBuffer(10000) { case DerivedName(original, _) => writeByte(tag) withLength { writeNameRef(original) } - } - } - override def 
assemble(): Unit = { + override def assemble(): Unit = var i = 0 for (name, ref) <- nameRefs do val ref = nameRefs(name) assert(ref.index == i) i += 1 pickleNameContents(name) - } -} -object NameBuffer { +object NameBuffer: private val maxIndexWidth = 3 // allows name indices up to 2^21. private val payloadBitsPerByte = 7 // determined by nat encoding in TastyBuffer private val maxNumInByte = (1 << payloadBitsPerByte) - 1 -} diff --git a/compiler/src/dotty/tools/dotc/core/tasty/PositionPickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/PositionPickler.scala index 924b87bec003..1993e21eb70f 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/PositionPickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/PositionPickler.scala @@ -43,7 +43,7 @@ object PositionPickler: pickler.newSection(PositionsSection, buf) /** Pickle the number of lines followed by the length of each line */ - def pickleLineOffsets(): Unit = { + def pickleLineOffsets(): Unit = val content = source.content() buf.writeNat(content.count(_ == '\n') + 1) // number of lines var lastIndex = content.indexOf('\n', 0) @@ -53,12 +53,11 @@ object PositionPickler: val end = if nextIndex != -1 then nextIndex else content.length buf.writeNat(end - lastIndex - 1) // size of the next line lastIndex = nextIndex - } pickleLineOffsets() var lastIndex = 0 var lastSpan = Span(0, 0) - def pickleDeltas(index: Int, span: Span) = { + def pickleDeltas(index: Int, span: Span) = val addrDelta = index - lastIndex val startDelta = span.start - lastSpan.start val endDelta = span.end - lastSpan.end @@ -74,17 +73,15 @@ object PositionPickler: // that forwards to the specialized `addOne` in `BitSet`. Since the // current backend does not implement `@inline` we are missing the // specialization. 
- } - def pickleSource(source: SourceFile): Unit = { + def pickleSource(source: SourceFile): Unit = buf.writeInt(SOURCE) val relativePath = SourceFile.relativePath(source, relativePathReference) buf.writeInt(pickler.nameBuffer.nameIndex(relativePath.toTermName).index) - } /** True if x's position shouldn't be reconstructed automatically from its initial span */ - def alwaysNeedsPos(x: Positioned) = x match { + def alwaysNeedsPos(x: Positioned) = x match case // initialSpan is inaccurate for trees with lazy field _: WithLazyFields @@ -101,39 +98,33 @@ object PositionPickler: // they might lose their position | _: Trees.Hole[?] => true case _ => false - } - def traverse(x: Any, current: SourceFile): Unit = x match { + def traverse(x: Any, current: SourceFile): Unit = x match case x: untpd.Tree => - if (x.span.exists) { + if (x.span.exists) val addr = addrOfTree(x) - if (addr != NoAddr) { - if (x.source != current) { + if (addr != NoAddr) + if (x.source != current) // we currently do not share trees when unpickling, so if one path to a tree contains // a source change while another does not, we have to record the position of the tree twice // in order not to miss the source change. Test case is t3232a.scala. 
pickleDeltas(addr.index, x.span) pickleSource(x.source) - } else if (!pickledIndices.contains(addr.index) && (x.span.toSynthetic != x.envelope(x.source) || alwaysNeedsPos(x))) pickleDeltas(addr.index, x.span) - } - } x match case x: untpd.MemberDef => traverse(treeAnnots(x), x.source) case _ => val limit = x.productArity var n = 0 - while (n < limit) { + while (n < limit) traverse(x.productElement(n), x.source) n += 1 - } case y :: ys => traverse(y, current) traverse(ys, current) case _ => - } for (root <- roots) traverse(root, NoSource) end picklePositions diff --git a/compiler/src/dotty/tools/dotc/core/tasty/PositionUnpickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/PositionUnpickler.scala index 9c66e43eae80..624273ec2d90 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/PositionUnpickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/PositionUnpickler.scala @@ -11,7 +11,7 @@ import util.Spans._ import Names.TermName /** Unpickler for tree positions */ -class PositionUnpickler(reader: TastyReader, nameAtRef: NameRef => TermName) { +class PositionUnpickler(reader: TastyReader, nameAtRef: NameRef => TermName): import reader._ private var myLineSizes: Array[Int] = _ @@ -19,8 +19,8 @@ class PositionUnpickler(reader: TastyReader, nameAtRef: NameRef => TermName) { private var mySourcePaths: util.HashMap[Addr, String] = _ private var isDefined = false - def ensureDefined(): Unit = { - if (!isDefined) { + def ensureDefined(): Unit = + if (!isDefined) val lines = readNat() myLineSizes = new Array[Int](lines) var i = 0 @@ -33,13 +33,12 @@ class PositionUnpickler(reader: TastyReader, nameAtRef: NameRef => TermName) { var curIndex = 0 var curStart = 0 var curEnd = 0 - while (!isAtEnd) { + while (!isAtEnd) val header = readInt() - if (header == SOURCE) { + if (header == SOURCE) val path = nameAtRef(readNameRef()).toString mySourcePaths(Addr(curIndex)) = path - } - else { + else val addrDelta = header >> 3 val hasStart = (header & 4) != 0 val hasEnd = (header 
& 2) != 0 @@ -51,27 +50,19 @@ class PositionUnpickler(reader: TastyReader, nameAtRef: NameRef => TermName) { mySpans(Addr(curIndex)) = if (hasPoint) Span(curStart, curEnd, curStart + readInt()) else Span(curStart, curEnd) - } - } isDefined = true - } - } - private[tasty] def spans: util.ReadOnlyMap[Addr, Span] = { + private[tasty] def spans: util.ReadOnlyMap[Addr, Span] = ensureDefined() mySpans - } - private[tasty] def sourcePaths: util.ReadOnlyMap[Addr, String] = { + private[tasty] def sourcePaths: util.ReadOnlyMap[Addr, String] = ensureDefined() mySourcePaths - } - private[tasty] def lineSizes: Array[Int] = { + private[tasty] def lineSizes: Array[Int] = ensureDefined() myLineSizes - } def spanAt(addr: Addr): Span = spans.getOrElse(addr, NoSpan) def sourcePathAt(addr: Addr): String = sourcePaths.getOrElse(addr, "") -} diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TastyAnsiiPrinter.scala b/compiler/src/dotty/tools/dotc/core/tasty/TastyAnsiiPrinter.scala index d8d72a4e651e..a247e03f0164 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TastyAnsiiPrinter.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TastyAnsiiPrinter.scala @@ -2,8 +2,7 @@ package dotty.tools.dotc package core package tasty -class TastyAnsiiPrinter(bytes: Array[Byte]) extends TastyPrinter(bytes) { +class TastyAnsiiPrinter(bytes: Array[Byte]) extends TastyPrinter(bytes): override protected def nameStr(str: String): String = Console.MAGENTA + str + Console.RESET override protected def treeStr(str: String): String = Console.YELLOW + str + Console.RESET override protected def lengthStr(str: String): String = Console.CYAN + str + Console.RESET -} diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TastyClassName.scala b/compiler/src/dotty/tools/dotc/core/tasty/TastyClassName.scala index c938868a3c48..8d5f06bb9eb6 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TastyClassName.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TastyClassName.scala @@ -12,7 +12,7 @@ import 
TastyUnpickler._ import dotty.tools.tasty.TastyFormat.ASTsSection /** Reads the package and class name of the class contained in this TASTy */ -class TastyClassName(bytes: Array[Byte]) { +class TastyClassName(bytes: Array[Byte]): val unpickler: TastyUnpickler = new TastyUnpickler(bytes) import unpickler.{nameAtRef, unpickle} @@ -20,16 +20,16 @@ class TastyClassName(bytes: Array[Byte]) { /** Returns a tuple with the package and class names */ def readName(): Option[(TermName, TermName)] = unpickle(new TreeSectionUnpickler) - class TreeSectionUnpickler extends SectionUnpickler[(TermName, TermName)](ASTsSection) { + class TreeSectionUnpickler extends SectionUnpickler[(TermName, TermName)](ASTsSection): import dotty.tools.tasty.TastyFormat._ - def unpickle(reader: TastyReader, tastyName: NameTable): (TermName, TermName) = { + def unpickle(reader: TastyReader, tastyName: NameTable): (TermName, TermName) = import reader._ - def readNames(packageName: TermName): (TermName, TermName) = { + def readNames(packageName: TermName): (TermName, TermName) = val tag = readByte() - if (tag >= firstLengthTreeTag) { + if (tag >= firstLengthTreeTag) val len = readNat() val end = currentAddr + len - tag match { + tag match case TYPEDEF => val className = reader.readName() goto(end) @@ -39,9 +39,7 @@ class TastyClassName(bytes: Array[Byte]) { readNames(packageName) case PACKAGE => readNames(packageName) - } - } - else tag match { + else tag match case TERMREFpkg | TYPEREFpkg => val subPackageName = reader.readName() readNames(subPackageName) @@ -54,15 +52,9 @@ class TastyClassName(bytes: Array[Byte]) { readNames(subPackageName) case _ => readNames(packageName) - } - } readNames(nme.EMPTY_PACKAGE) - } - extension (reader: TastyReader) def readName() = { + extension (reader: TastyReader) def readName() = val idx = reader.readNat() nameAtRef(NameRef(idx)) - } - } -} diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TastyHTMLPrinter.scala 
b/compiler/src/dotty/tools/dotc/core/tasty/TastyHTMLPrinter.scala index b234705413ae..92dbf7dfc2f9 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TastyHTMLPrinter.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TastyHTMLPrinter.scala @@ -2,8 +2,7 @@ package dotty.tools.dotc package core package tasty -class TastyHTMLPrinter(bytes: Array[Byte]) extends TastyPrinter(bytes) { +class TastyHTMLPrinter(bytes: Array[Byte]) extends TastyPrinter(bytes): override protected def nameStr(str: String): String = s"$str" override protected def treeStr(str: String): String = s"$str" override protected def lengthStr(str: String): String = s"$str" -} diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TastyPickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/TastyPickler.scala index 4f1e84ac9184..08e2cf7a3d2f 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TastyPickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TastyPickler.scala @@ -13,16 +13,14 @@ import collection.mutable import core.Symbols.ClassSymbol import Decorators._ -object TastyPickler { +object TastyPickler: - private val versionStringBytes = { + private val versionStringBytes = val compilerString = s"Scala ${config.Properties.simpleVersionString}" compilerString.getBytes(java.nio.charset.StandardCharsets.UTF_8) - } -} -class TastyPickler(val rootCls: ClassSymbol) { +class TastyPickler(val rootCls: ClassSymbol): private val sections = new mutable.ArrayBuffer[(NameRef, TastyBuffer)] @@ -31,7 +29,7 @@ class TastyPickler(val rootCls: ClassSymbol) { def newSection(name: String, buf: TastyBuffer): Unit = sections += ((nameBuffer.nameIndex(name.toTermName), buf)) - def assembleParts(): Array[Byte] = { + def assembleParts(): Array[Byte] = def lengthWithLength(buf: TastyBuffer) = buf.length + natSize(buf.length) @@ -47,7 +45,7 @@ class TastyPickler(val rootCls: ClassSymbol) { // Hash of positions, comments and any additional section val uuidHi: Long = otherSectionHashes.fold(0L)(_ ^ _) - val 
headerBuffer = { + val headerBuffer = val buf = new TastyBuffer(header.length + TastyPickler.versionStringBytes.length + 32) for (ch <- header) buf.writeByte(ch.toByte) buf.writeNat(MajorVersion) @@ -58,7 +56,6 @@ class TastyPickler(val rootCls: ClassSymbol) { buf.writeUncompressedLong(uuidLow) buf.writeUncompressedLong(uuidHi) buf - } val totalSize = headerBuffer.length + @@ -70,12 +67,9 @@ class TastyPickler(val rootCls: ClassSymbol) { all.writeBytes(headerBuffer.bytes, headerBuffer.length) all.writeNat(nameBuffer.length) all.writeBytes(nameBuffer.bytes, nameBuffer.length) - for ((nameRef, buf) <- sections) { + for ((nameRef, buf) <- sections) all.writeNat(nameRef.index) all.writeNat(buf.length) all.writeBytes(buf.bytes, buf.length) - } assert(all.length == totalSize && all.bytes.length == totalSize, s"totalSize = $totalSize, all.length = ${all.length}, all.bytes.length = ${all.bytes.length}") all.bytes - } -} diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TastyPrinter.scala b/compiler/src/dotty/tools/dotc/core/tasty/TastyPrinter.scala index 5876b69edfde..361ab79f48a2 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TastyPrinter.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TastyPrinter.scala @@ -21,7 +21,7 @@ object TastyPrinter: else new TastyAnsiiPrinter(bytes) printer.showContents() - def main(args: Array[String]): Unit = { + def main(args: Array[String]): Unit = // TODO: Decouple CliCommand from Context and use CliCommand.distill? 
val lineWidth = 80 val line = "-" * lineWidth @@ -56,9 +56,8 @@ object TastyPrinter: if printLastLine then println(line) - } -class TastyPrinter(bytes: Array[Byte]) { +class TastyPrinter(bytes: Array[Byte]): private val sb: StringBuilder = new StringBuilder @@ -70,61 +69,54 @@ class TastyPrinter(bytes: Array[Byte]) { private def nameRefToString(ref: NameRef): String = nameToString(nameAtRef(ref)) private def printNames(): Unit = - for ((name, idx) <- nameAtRef.contents.zipWithIndex) { + for ((name, idx) <- nameAtRef.contents.zipWithIndex) val index = nameStr("%4d".format(idx)) sb.append(index).append(": ").append(nameToString(name)).append("\n") - } - def showContents(): String = { + def showContents(): String = sb.append("Names:\n") printNames() sb.append("\n") sb.append("Trees:\n") - unpickle(new TreeSectionUnpickler) match { + unpickle(new TreeSectionUnpickler) match case Some(s) => sb.append(s) case _ => - } sb.append("\n\n") - unpickle(new PositionSectionUnpickler) match { + unpickle(new PositionSectionUnpickler) match case Some(s) => sb.append(s) case _ => - } sb.append("\n\n") - unpickle(new CommentSectionUnpickler) match { + unpickle(new CommentSectionUnpickler) match case Some(s) => sb.append(s) case _ => - } sb.result - } - class TreeSectionUnpickler extends SectionUnpickler[String](ASTsSection) { + class TreeSectionUnpickler extends SectionUnpickler[String](ASTsSection): import dotty.tools.tasty.TastyFormat._ private val sb: StringBuilder = new StringBuilder - def unpickle(reader: TastyReader, tastyName: NameTable): String = { + def unpickle(reader: TastyReader, tastyName: NameTable): String = import reader._ var indent = 0 - def newLine() = { + def newLine() = val length = treeStr("%5d".format(index(currentAddr) - index(startAddr))) sb.append(s"\n $length:" + " " * indent) - } def printNat() = sb.append(treeStr(" " + readNat())) - def printName() = { + def printName() = val idx = readNat() sb.append(nameStr(" " + idx + " [" + 
nameRefToString(NameRef(idx)) + "]")) - } - def printTree(): Unit = { + def printTree(): Unit = newLine() val tag = readByte() sb.append(" ").append(astTagToString(tag)) indent += 2 - if (tag >= firstLengthTreeTag) { + if (tag >= firstLengthTreeTag) val len = readNat() sb.append(s"(${lengthStr(len.toString)})") val end = currentAddr + len def printTrees() = until(end)(printTree()) - tag match { + tag match case RENAMED => printName(); printName() case VALDEF | DEFDEF | TYPEDEF | TYPEPARAM | PARAM | NAMEDARG | BIND => @@ -141,43 +133,33 @@ class TastyPrinter(bytes: Array[Byte]) { printNat(); printNat() case _ => printTrees() - } - if (currentAddr != end) { + if (currentAddr != end) sb.append(s"incomplete read, current = $currentAddr, end = $end\n") goto(end) - } - } - else if (tag >= firstNatASTTreeTag) { - tag match { + else if (tag >= firstNatASTTreeTag) + tag match case IDENT | IDENTtpt | SELECT | SELECTtpt | TERMREF | TYPEREF | SELFDEF => printName() case _ => printNat() - } printTree() - } else if (tag >= firstASTTreeTag) printTree() else if (tag >= firstNatTreeTag) - tag match { + tag match case TERMREFpkg | TYPEREFpkg | STRINGconst | IMPORTED => printName() case _ => printNat() - } indent -= 2 - } sb.append(s"start = ${reader.startAddr}, base = $base, current = $currentAddr, end = $endAddr\n") sb.append(s"${endAddr.index - startAddr.index} bytes of AST, base = $currentAddr\n") - while (!isAtEnd) { + while (!isAtEnd) printTree() newLine() - } sb.result - } - } - class PositionSectionUnpickler extends SectionUnpickler[String](PositionsSection) { + class PositionSectionUnpickler extends SectionUnpickler[String](PositionsSection): private val sb: StringBuilder = new StringBuilder - def unpickle(reader: TastyReader, tastyName: NameTable): String = { + def unpickle(reader: TastyReader, tastyName: NameTable): String = val posUnpickler = new PositionUnpickler(reader, tastyName) sb.append(s" ${reader.endAddr.index - reader.currentAddr.index}") sb.append(" position 
bytes:\n") @@ -187,42 +169,34 @@ class TastyPrinter(bytes: Array[Byte]) { sb.append(" positions:\n") val spans = posUnpickler.spans val sorted = spans.toSeq.sortBy(_._1.index) - for ((addr, pos) <- sorted) { + for ((addr, pos) <- sorted) sb.append(treeStr("%10d".format(addr.index))) sb.append(s": ${offsetToInt(pos.start)} .. ${pos.end}\n") - } val sources = posUnpickler.sourcePaths sb.append(s"\n source paths:\n") val sortedPath = sources.toSeq.sortBy(_._1.index) - for ((addr, path) <- sortedPath) { + for ((addr, path) <- sortedPath) sb.append(treeStr("%10d: ".format(addr.index))) sb.append(path) sb.append("\n") - } sb.result - } - } - class CommentSectionUnpickler extends SectionUnpickler[String](CommentsSection) { + class CommentSectionUnpickler extends SectionUnpickler[String](CommentsSection): private val sb: StringBuilder = new StringBuilder - def unpickle(reader: TastyReader, tastyName: NameTable): String = { + def unpickle(reader: TastyReader, tastyName: NameTable): String = sb.append(s" ${reader.endAddr.index - reader.currentAddr.index}") val comments = new CommentUnpickler(reader).comments sb.append(s" comment bytes:\n") val sorted = comments.toSeq.sortBy(_._1.index) - for ((addr, cmt) <- sorted) { + for ((addr, cmt) <- sorted) sb.append(treeStr("%10d".format(addr.index))) sb.append(s": ${cmt.raw} (expanded = ${cmt.isExpanded})\n") - } sb.result - } - } protected def nameStr(str: String): String = str protected def treeStr(str: String): String = str protected def lengthStr(str: String): String = str -} diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TastyUnpickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/TastyUnpickler.scala index 70bdec7780e2..3f1f8222eb9e 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TastyUnpickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TastyUnpickler.scala @@ -12,23 +12,20 @@ import scala.collection.mutable import Names.{TermName, termName, EmptyTermName} import NameKinds._ -object TastyUnpickler { 
+object TastyUnpickler: - abstract class SectionUnpickler[R](val name: String) { + abstract class SectionUnpickler[R](val name: String): def unpickle(reader: TastyReader, nameAtRef: NameTable): R - } - class NameTable extends (NameRef => TermName) { + class NameTable extends (NameRef => TermName): private val names = new mutable.ArrayBuffer[TermName] def add(name: TermName): mutable.ArrayBuffer[TermName] = names += name def apply(ref: NameRef): TermName = names(ref.index) def contents: Iterable[TermName] = names - } -} import TastyUnpickler._ -class TastyUnpickler(reader: TastyReader) { +class TastyUnpickler(reader: TastyReader): import reader._ def this(bytes: Array[Byte]) = this(new TastyReader(bytes)) @@ -39,15 +36,14 @@ class TastyUnpickler(reader: TastyReader) { private def readName(): TermName = nameAtRef(readNameRef()) private def readString(): String = readName().toString - private def readParamSig(): Signature.ParamSig = { + private def readParamSig(): Signature.ParamSig = val ref = readInt() if (ref < 0) ref.abs else nameAtRef(NameRef(ref)).toTypeName - } - private def readNameContents(): TermName = { + private def readNameContents(): TermName = val tag = readByte() val length = readNat() val start = currentAddr @@ -58,7 +54,7 @@ class TastyUnpickler(reader: TastyReader) { val sig = Signature(paramsSig, result) SignedName(original, sig, target) - val result = tag match { + val result = tag match case UTF8 => goto(end) termName(bytes, start.index, length) @@ -83,26 +79,21 @@ class TastyUnpickler(reader: TastyReader) { simpleNameKindOfTag(tag)(readName()) case _ => throw MatchError(s"unknown name tag ${nameTagToString(tag)}") - } assert(currentAddr == end, s"bad name $result $start $currentAddr $end") result - } new TastyHeaderUnpickler(reader).readHeader() - locally { + locally: until(readEnd()) { nameAtRef.add(readNameContents()) } - while (!isAtEnd) { + while (!isAtEnd) val secName = readString() val secEnd = readEnd() sectionReader(secName) = new 
TastyReader(bytes, currentAddr.index, secEnd.index, currentAddr.index) goto(secEnd) - } - } def unpickle[R](sec: SectionUnpickler[R]): Option[R] = for (reader <- sectionReader.get(sec.name)) yield sec.unpickle(reader, nameAtRef) private[dotc] def bytes: Array[Byte] = reader.bytes -} diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TreeBuffer.scala b/compiler/src/dotty/tools/dotc/core/tasty/TreeBuffer.scala index d0f08379c114..efaa888a5fbe 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TreeBuffer.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TreeBuffer.scala @@ -12,7 +12,7 @@ import config.Printers.pickling import ast.untpd.Tree import java.util.Arrays -class TreeBuffer extends TastyBuffer(50000) { +class TreeBuffer extends TastyBuffer(50000): private inline val ItemsOverOffsets = 2 private val initialOffsetSize = bytes.length / (AddrWidth * ItemsOverOffsets) @@ -37,41 +37,35 @@ class TreeBuffer extends TastyBuffer(50000) { private def offset(i: Int): Addr = Addr(offsets(i)) - private def keepOffset(relative: Boolean): Unit = { - if (numOffsets == offsets.length) { + private def keepOffset(relative: Boolean): Unit = + if (numOffsets == offsets.length) offsets = dble(offsets) isRelative = dble(isRelative) - } offsets(numOffsets) = length isRelative(numOffsets) = relative numOffsets += 1 - } /** Reserve space for a reference, to be adjusted later */ - def reserveRef(relative: Boolean): Addr = { + def reserveRef(relative: Boolean): Addr = val addr = currentAddr keepOffset(relative) reserveAddr() addr - } /** Write reference right adjusted into freshly reserved field. 
*/ - def writeRef(target: Addr): Unit = { + def writeRef(target: Addr): Unit = keepOffset(relative = false) fillAddr(reserveAddr(), target) - } /** Fill previously reserved field with a reference */ - def fillRef(at: Addr, target: Addr, relative: Boolean): Unit = { + def fillRef(at: Addr, target: Addr, relative: Boolean): Unit = val addr = if (relative) target.relativeTo(at) else target fillAddr(at, addr) - } /** The amount by which the bytes at the given address are shifted under compression */ - def deltaAt(at: Addr, scratch: ScratchData): Int = { + def deltaAt(at: Addr, scratch: ScratchData): Int = val idx = bestFit(offsets, numOffsets, at.index - 1) if (idx < 0) 0 else scratch.delta(idx) - } /** The address to which `x` is translated under compression */ def adjusted(x: Addr, scratch: ScratchData): Addr = x - deltaAt(x, scratch) @@ -92,11 +86,11 @@ class TreeBuffer extends TastyBuffer(50000) { arr /** Compute all shift-deltas */ - def computeDeltas() = { + def computeDeltas() = scratch.delta = reserve(scratch.delta) var lastDelta = 0 var i = 0 - while (i < numOffsets) { + while (i < numOffsets) val off = offset(i) val skippedOff = skipZeroes(off) val skippedCount = skippedOff.index - off.index @@ -104,23 +98,19 @@ class TreeBuffer extends TastyBuffer(50000) { lastDelta += skippedCount scratch.delta(i) = lastDelta i += 1 - } - } /** The absolute or relative adjusted address at index `i` of `offsets` array*/ - def adjustedOffset(i: Int): Addr = { + def adjustedOffset(i: Int): Addr = val at = offset(i) val original = getAddr(at) - if (isRelative(i)) { + if (isRelative(i)) val start = skipNat(at) val len1 = original + scratch.delta(i) - deltaAt(original + start.index, scratch) val len2 = adjusted(original + start.index, scratch) - adjusted(start, scratch).index assert(len1 == len2, s"adjusting offset #$i: $at, original = $original, len1 = $len1, len2 = $len2") len1 - } else adjusted(original, scratch) - } /** Adjust all offsets according to previously computed 
deltas */ def adjustOffsets(): Unit = @@ -134,7 +124,7 @@ class TreeBuffer extends TastyBuffer(50000) { * generate additional zeroes that can be skipped) due to previously * computed adjustments. */ - def adjustDeltas(): Int = { + def adjustDeltas(): Int = scratch.delta1 = reserve(scratch.delta1) var lastDelta = 0 var i = 0 @@ -150,17 +140,16 @@ class TreeBuffer extends TastyBuffer(50000) { scratch.delta = scratch.delta1 scratch.delta1 = tmp saved - } /** Compress pickle buffer, shifting bytes to close all skipped zeroes. */ - def compress(): Int = { + def compress(): Int = var lastDelta = 0 var start = 0 var i = 0 var wasted = 0 def shift(end: Int) = System.arraycopy(bytes, start, bytes, start - lastDelta, end - start) - while (i < numOffsets) { + while (i < numOffsets) val next = offsets(i) shift(next) start = next + scratch.delta(i) - lastDelta @@ -169,11 +158,9 @@ class TreeBuffer extends TastyBuffer(50000) { wasted += (pastZeroes - start) lastDelta = scratch.delta(i) i += 1 - } shift(length) length -= lastDelta wasted - } def adjustTreeAddrs(): Unit = var i = 0 @@ -196,4 +183,3 @@ class TreeBuffer extends TastyBuffer(50000) { val wasted = compress() pickling.println(s"original length: $origLength, compressed to: $length, wasted: $wasted") // DEBUG, for now. 
end compactify -} diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala index 645c6f81e539..246c1ad911e6 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala @@ -23,7 +23,7 @@ import dotty.tools.tasty.TastyFormat.ASTsSection object TreePickler: class StackSizeExceeded(val mdef: tpd.MemberDef) extends Exception -class TreePickler(pickler: TastyPickler) { +class TreePickler(pickler: TastyPickler): val buf: TreeBuffer = new TreeBuffer pickler.newSection(ASTsSection, buf) import buf._ @@ -56,20 +56,18 @@ class TreePickler(pickler: TastyPickler) { def docString(tree: untpd.MemberDef): Option[Comment] = Option(docStrings.lookup(tree)) - private inline def withLength(inline op: Unit) = { + private inline def withLength(inline op: Unit) = val lengthAddr = reserveRef(relative = true) op fillRef(lengthAddr, currentAddr, relative = true) - } def addrOfSym(sym: Symbol): Option[Addr] = symRefs.get(sym) - def preRegister(tree: Tree)(using Context): Unit = tree match { + def preRegister(tree: Tree)(using Context): Unit = tree match case tree: MemberDef => if (!symRefs.contains(tree.symbol)) symRefs(tree.symbol) = NoAddr case _ => - } def registerDef(sym: Symbol): Unit = symRefs(sym) = currentAddr @@ -99,16 +97,15 @@ class TreePickler(pickler: TastyPickler) { else writeRef(label.uncheckedNN) // !!! 
Dotty problem: Not clear why nn or uncheckedNN is needed here - private def pickleForwardSymRef(sym: Symbol)(using Context) = { + private def pickleForwardSymRef(sym: Symbol)(using Context) = val ref = reserveRef(relative = false) assert(!sym.is(Flags.Package), sym) forwardSymRefs(sym) = ref :: forwardSymRefs.getOrElse(sym, Nil) - } private def isLocallyDefined(sym: Symbol)(using Context) = sym.topLevelClass.isLinkedWith(pickler.rootCls) - def pickleConstant(c: Constant)(using Context): Unit = c.tag match { + def pickleConstant(c: Constant)(using Context): Unit = c.tag match case UnitTag => writeByte(UNITconst) case BooleanTag => @@ -142,7 +139,6 @@ class TreePickler(pickler: TastyPickler) { case ClazzTag => writeByte(CLASSconst) pickleType(c.typeValue) - } def pickleVariances(tp: Type)(using Context): Unit = tp match case tp: HKTypeLambda if tp.isDeclaredVarianceLambda => @@ -153,27 +149,22 @@ class TreePickler(pickler: TastyPickler) { else STABLE) case _ => - def pickleType(tpe0: Type, richTypes: Boolean = false)(using Context): Unit = { + def pickleType(tpe0: Type, richTypes: Boolean = false)(using Context): Unit = val tpe = tpe0.stripTypeVar - try { + try val prev: Addr | Null = pickledTypes.lookup(tpe) - if (prev == null) { + if (prev == null) pickledTypes(tpe) = currentAddr pickleNewType(tpe, richTypes) - } - else { + else writeByte(SHAREDtype) writeRef(prev.uncheckedNN) - } - } - catch { + catch case ex: AssertionError => println(i"error when pickling type $tpe") throw ex - } - } - private def pickleNewType(tpe: Type, richTypes: Boolean)(using Context): Unit = tpe match { + private def pickleNewType(tpe: Type, richTypes: Boolean)(using Context): Unit = tpe match case AppliedType(tycon, args) => if tycon.typeSymbol == defn.MatchCaseClass then writeByte(MATCHCASEtype) @@ -185,53 +176,43 @@ class TreePickler(pickler: TastyPickler) { pickleConstant(value) case tpe: NamedType => val sym = tpe.symbol - def pickleExternalRef(sym: Symbol) = { + def 
pickleExternalRef(sym: Symbol) = val isShadowedRef = sym.isClass && tpe.prefix.member(sym.name).symbol != sym - if (sym.is(Flags.Private) || isShadowedRef) { + if (sym.is(Flags.Private) || isShadowedRef) writeByte(if (tpe.isType) TYPEREFin else TERMREFin) - withLength { + withLength: pickleNameAndSig(sym.name, sym.signature, sym.targetName) pickleType(tpe.prefix) pickleType(sym.owner.typeRef) - } - } - else { + else writeByte(if (tpe.isType) TYPEREF else TERMREF) pickleNameAndSig(sym.name, tpe.signature, sym.targetName) pickleType(tpe.prefix) - } - } - if (sym.is(Flags.Package)) { + if (sym.is(Flags.Package)) writeByte(if (tpe.isType) TYPEREFpkg else TERMREFpkg) pickleName(sym.fullName) - } - else if (tpe.prefix == NoPrefix) { + else if (tpe.prefix == NoPrefix) writeByte(if (tpe.isType) TYPEREFdirect else TERMREFdirect) if Config.checkLevelsOnConstraints && !symRefs.contains(sym) && !sym.isPatternBound && !sym.hasAnnotation(defn.QuotedRuntimePatterns_patternTypeAnnot) then report.error(em"pickling reference to as yet undefined $tpe with symbol ${sym}", sym.srcPos) pickleSymRef(sym) - } - else tpe.designator match { + else tpe.designator match case name: Name => writeByte(if (tpe.isType) TYPEREF else TERMREF) pickleName(name); pickleType(tpe.prefix) case sym: Symbol => - if (isLocallyDefined(sym)) { + if (isLocallyDefined(sym)) writeByte(if (tpe.isType) TYPEREFsymbol else TERMREFsymbol) pickleSymRef(sym); pickleType(tpe.prefix) - } else pickleExternalRef(sym) - } case tpe: ThisType => - if (tpe.cls.is(Flags.Package) && !tpe.cls.isEffectiveRoot) { + if (tpe.cls.is(Flags.Package) && !tpe.cls.isEffectiveRoot) writeByte(TERMREFpkg) pickleName(tpe.cls.fullName) - } - else { + else writeByte(THIS) pickleType(tpe.tref) - } case tpe: SuperType => writeByte(SUPERtype) withLength { pickleType(tpe.thistpe); pickleType(tpe.supertpe) } @@ -244,22 +225,20 @@ class TreePickler(pickler: TastyPickler) { pickleType(tpe.info) case tpe: RefinedType => writeByte(REFINEDtype) - 
withLength { + withLength: pickleName(tpe.refinedName) pickleType(tpe.parent) pickleType(tpe.refinedInfo, richTypes = true) - } case tpe: RecType => writeByte(RECtype) pickleType(tpe.parent) case tpe: TypeBounds => writeByte(TYPEBOUNDS) - withLength { + withLength: pickleType(tpe.lo, richTypes) if !tpe.isInstanceOf[AliasingBounds] then pickleType(tpe.hi, richTypes) pickleVariances(tpe.hi) - } case tpe: AnnotatedType => writeByte(ANNOTATEDtype) withLength { pickleType(tpe.parent, richTypes); pickleTree(tpe.annot.tree) } @@ -276,11 +255,10 @@ class TreePickler(pickler: TastyPickler) { pickleMethodic(TYPELAMBDAtype, tpe, EmptyFlags) case tpe: MatchType => writeByte(MATCHtype) - withLength { + withLength: pickleType(tpe.bound) pickleType(tpe.scrutinee) tpe.cases.foreach(pickleType(_)) - } case tpe: PolyType if richTypes => pickleMethodic(POLYtype, tpe, EmptyFlags) case tpe: MethodType if richTypes => @@ -292,37 +270,31 @@ class TreePickler(pickler: TastyPickler) { assert(pickleParamRef(tpe), s"orphan parameter reference: $tpe") case tpe: LazyRef => pickleType(tpe.ref) - } - def pickleMethodic(tag: Int, tpe: LambdaType, mods: FlagSet)(using Context): Unit = { + def pickleMethodic(tag: Int, tpe: LambdaType, mods: FlagSet)(using Context): Unit = writeByte(tag) - withLength { + withLength: pickleType(tpe.resultType, richTypes = true) tpe.paramNames.lazyZip(tpe.paramInfos).foreach { (name, tpe) => pickleType(tpe); pickleName(name) } if (mods != EmptyFlags) pickleFlags(mods, tpe.isTermLambda) - } - } - def pickleParamRef(tpe: ParamRef)(using Context): Boolean = { + def pickleParamRef(tpe: ParamRef)(using Context): Boolean = val binder: Addr | Null = pickledTypes.lookup(tpe.binder) val pickled = binder != null - if (pickled) { + if (pickled) writeByte(PARAMtype) withLength { writeRef(binder.uncheckedNN); writeNat(tpe.paramNum) } - } pickled - } def pickleTpt(tpt: Tree)(using Context): Unit = pickleTree(tpt) - def pickleTreeUnlessEmpty(tree: Tree)(using Context): Unit = { + 
def pickleTreeUnlessEmpty(tree: Tree)(using Context): Unit = if (!tree.isEmpty) pickleTree(tree) - } - def pickleDef(tag: Int, mdef: MemberDef, tpt: Tree, rhs: Tree = EmptyTree, pickleParams: => Unit = ())(using Context): Unit = { + def pickleDef(tag: Int, mdef: MemberDef, tpt: Tree, rhs: Tree = EmptyTree, pickleParams: => Unit = ())(using Context): Unit = val sym = mdef.symbol assert(symRefs(sym) == NoAddr, sym) @@ -330,16 +302,14 @@ class TreePickler(pickler: TastyPickler) { writeByte(tag) val addr = currentAddr try - withLength { + withLength: pickleName(sym.name) pickleParams - tpt match { + tpt match case _: Template | _: Hole => pickleTree(tpt) case _ if tpt.isType => pickleTpt(tpt) - } pickleTreeUnlessEmpty(rhs) pickleModifiers(sym, mdef) - } catch case ex: Throwable => if !ctx.settings.YnoDecodeStacktraces.value @@ -353,36 +323,30 @@ class TreePickler(pickler: TastyPickler) { val comment = docCtx.docstrings.lookup(sym) if comment != null then docStrings(mdef) = comment - } - def pickleParam(tree: Tree)(using Context): Unit = { + def pickleParam(tree: Tree)(using Context): Unit = registerTreeAddr(tree) - tree match { + tree match case tree: ValDef => pickleDef(PARAM, tree, tree.tpt) case tree: TypeDef => pickleDef(TYPEPARAM, tree, tree.rhs) - } - } - def pickleParams(trees: List[Tree])(using Context): Unit = { + def pickleParams(trees: List[Tree])(using Context): Unit = trees.foreach(preRegister) trees.foreach(pickleParam) - } - def pickleStats(stats: List[Tree])(using Context): Unit = { + def pickleStats(stats: List[Tree])(using Context): Unit = stats.foreach(preRegister) stats.foreach(stat => if (!stat.isEmpty) pickleTree(stat)) - } - def pickleTree(tree: Tree)(using Context): Unit = { + def pickleTree(tree: Tree)(using Context): Unit = val addr = registerTreeAddr(tree) - if (addr != currentAddr) { + if (addr != currentAddr) writeByte(SHAREDterm) writeRef(addr) - } else - try tree match { + try tree match case Ident(name) => - tree.tpe match { + tree.tpe 
match case tp: TermRef if name != nme.WILDCARD => // wildcards are pattern bound, need to be preserved as ids. pickleType(tp) @@ -390,24 +354,21 @@ class TreePickler(pickler: TastyPickler) { writeByte(if (tree.isType) IDENTtpt else IDENT) pickleName(name) pickleType(tp) - } case This(qual) => if (qual.isEmpty) pickleType(tree.tpe) - else { + else writeByte(QUALTHIS) val ThisType(tref) = tree.tpe: @unchecked pickleTree(qual.withType(tref)) - } case Select(qual, name) => - name match { + name match case OuterSelectName(_, levels) => writeByte(SELECTouter) - withLength { + withLength: writeNat(levels) pickleTree(qual) val SkolemType(tp) = tree.tpe: @unchecked pickleType(tp) - } case _ => val sig = tree.tpe.signature var ename = tree.symbol.targetName @@ -423,54 +384,43 @@ class TreePickler(pickler: TastyPickler) { pickleTree(qual) else // select from owner writeByte(SELECTin) - withLength { + withLength: pickleNameAndSig(name, tree.symbol.signature, ename) pickleTree(qual) pickleType(tree.symbol.owner.typeRef) - } - } case Apply(fun, args) => - if (fun.symbol eq defn.throwMethod) { + if (fun.symbol eq defn.throwMethod) writeByte(THROW) pickleTree(args.head) - } else if fun.symbol.originalSignaturePolymorphic.exists then writeByte(APPLYsigpoly) - withLength { + withLength: pickleTree(fun) pickleType(fun.tpe.widenTermRefExpr, richTypes = true) // this widens to a MethodType, so need richTypes args.foreach(pickleTree) - } - else { + else writeByte(APPLY) - withLength { + withLength: pickleTree(fun) args.foreach(pickleTree) - } - } case TypeApply(fun, args) => writeByte(TYPEAPPLY) - withLength { + withLength: pickleTree(fun) args.foreach(pickleTpt) - } case Literal(const1) => - pickleConstant { - tree.tpe match { + pickleConstant: + tree.tpe match case ConstantType(const2) => const2 case _ => const1 - } - } case Super(qual, mix) => writeByte(SUPER) - withLength { + withLength: pickleTree(qual); - if (!mix.isEmpty) { + if (!mix.isEmpty) // mixinType being a TypeRef when 
mix is non-empty is enforced by TreeChecker#checkSuper val SuperType(_, mixinType: TypeRef) = tree.tpe: @unchecked pickleTree(mix.withType(mixinType)) - } - } case New(tpt) => writeByte(NEW) pickleTpt(tpt) @@ -490,28 +440,25 @@ class TreePickler(pickler: TastyPickler) { withLength { pickleTree(expr); stats.foreach(pickleTree) } case tree @ If(cond, thenp, elsep) => writeByte(IF) - withLength { + withLength: if (tree.isInline) writeByte(INLINE) pickleTree(cond) pickleTree(thenp) pickleTree(elsep) - } case Closure(env, meth, tpt) => writeByte(LAMBDA) assert(env.isEmpty) - withLength { + withLength: pickleTree(meth) if (tpt.tpe.exists) pickleTpt(tpt) - } case tree @ Match(selector, cases) => writeByte(MATCH) - withLength { + withLength: if (tree.isInline) if (selector.isEmpty) writeByte(IMPLICIT) else { writeByte(INLINE); pickleTree(selector) } else pickleTree(selector) tree.cases.foreach(pickleTree) - } case CaseDef(pat, guard, rhs) => writeByte(CASEDEF) withLength { pickleTree(pat); pickleTree(rhs); pickleTreeUnlessEmpty(guard) } @@ -530,38 +477,34 @@ class TreePickler(pickler: TastyPickler) { case Inlined(call, bindings, expansion) => writeByte(INLINED) bindings.foreach(preRegister) - withLength { + withLength: pickleTree(expansion) if (!call.isEmpty) pickleTree(call) bindings.foreach { b => assert(b.isInstanceOf[DefDef] || b.isInstanceOf[ValDef]) pickleTree(b) } - } case Bind(name, body) => val sym = tree.symbol registerDef(sym) writeByte(BIND) - withLength { + withLength: pickleName(name) pickleType(sym.info) pickleTree(body) pickleFlags(sym.flags &~ Case, sym.isTerm) - } case Alternative(alts) => writeByte(ALTERNATIVE) withLength { alts.foreach(pickleTree) } case UnApply(fun, implicits, patterns) => writeByte(UNAPPLY) - withLength { + withLength: pickleTree(fun) - for (implicitArg <- implicits) { + for (implicitArg <- implicits) writeByte(IMPLICITarg) pickleTree(implicitArg) - } pickleType(tree.tpe) patterns.foreach(pickleTree) - } case tree: ValDef => 
pickleDef(VALDEF, tree, tree.tpt, tree.rhs) case tree: DefDef => @@ -585,39 +528,32 @@ class TreePickler(pickler: TastyPickler) { registerDef(tree.symbol) writeByte(TEMPLATE) val (params, rest) = decomposeTemplateBody(tree.body) - withLength { + withLength: pickleParams(params) tree.parents.foreach(pickleTree) val cinfo @ ClassInfo(_, _, _, _, selfInfo) = tree.symbol.owner.info: @unchecked - if (!tree.self.isEmpty) { + if (!tree.self.isEmpty) writeByte(SELFDEF) pickleName(tree.self.name) if (!tree.self.tpt.isEmpty) pickleTree(tree.self.tpt) - else { + else if (!tree.self.isEmpty) registerTreeAddr(tree.self) - pickleType { - selfInfo match { + pickleType: + selfInfo match case sym: Symbol => sym.info case tp: Type => tp - } - } - } - } pickleStats(tree.constr :: rest) - } case Import(expr, selectors) => writeByte(IMPORT) - withLength { + withLength: pickleTree(expr) pickleSelectors(selectors) - } case Export(expr, selectors) => writeByte(EXPORT) - withLength { + withLength: pickleTree(expr) pickleSelectors(selectors) - } case PackageDef(pid, stats) => writeByte(PACKAGE) withLength { pickleType(pid.tpe); pickleStats(stats) } @@ -628,24 +564,22 @@ class TreePickler(pickler: TastyPickler) { pickleTree(ref) case RefinedTypeTree(parent, refinements) => if (refinements.isEmpty) pickleTree(parent) - else { + else val refineCls = refinements.head.symbol.owner.asClass registerDef(refineCls) pickledTypes(refineCls.typeRef) = currentAddr writeByte(REFINEDtpt) refinements.foreach(preRegister) withLength { pickleTree(parent); refinements.foreach(pickleTree) } - } case AppliedTypeTree(tycon, args) => writeByte(APPLIEDtpt) withLength { pickleTree(tycon); args.foreach(pickleTree) } case MatchTypeTree(bound, selector, cases) => writeByte(MATCHtpt) - withLength { + withLength: if (!bound.isEmpty) pickleTree(bound) pickleTree(selector) cases.foreach(pickleTree) - } case ByNameTypeTree(tp) => writeByte(BYNAMEtpt) pickleTree(tp) @@ -657,14 +591,13 @@ class TreePickler(pickler: 
TastyPickler) { withLength { pickleParams(tparams); pickleTree(body) } case TypeBoundsTree(lo, hi, alias) => writeByte(TYPEBOUNDStpt) - withLength { + withLength: pickleTree(lo); if alias.isEmpty then if hi ne lo then pickleTree(hi) else pickleTree(hi) pickleTree(alias) - } case tree @ Quote(body, Nil) => // TODO: Add QUOTE tag to TASTy assert(body.isTerm, @@ -687,20 +620,16 @@ class TreePickler(pickler: TastyPickler) { ) case Hole(_, idx, args, _) => writeByte(HOLE) - withLength { + withLength: writeNat(idx) pickleType(tree.tpe, richTypes = true) args.foreach(pickleTree) - } - } - catch { + catch case ex: TypeError => report.error(ex.toMessage, tree.srcPos.focus) case ex: AssertionError => println(i"error when pickling tree $tree") throw ex - } - } def pickleSelectors(selectors: List[untpd.ImportSelector])(using Context): Unit = for sel <- selectors do @@ -715,33 +644,29 @@ class TreePickler(pickler: TastyPickler) { pickleTree(tpt) case _ => - def pickleSelector(tag: Int, id: untpd.Ident)(using Context): Unit = { + def pickleSelector(tag: Int, id: untpd.Ident)(using Context): Unit = registerTreeAddr(id) writeByte(tag) pickleName(id.name) - } - def pickleModifiers(sym: Symbol, mdef: MemberDef)(using Context): Unit = { + def pickleModifiers(sym: Symbol, mdef: MemberDef)(using Context): Unit = import Flags._ var flags = sym.flags val privateWithin = sym.privateWithin - if (privateWithin.exists) { + if (privateWithin.exists) writeByte(if (flags.is(Protected)) PROTECTEDqualified else PRIVATEqualified) pickleType(privateWithin.typeRef) flags = flags &~ Protected - } if (flags.is(ParamAccessor) && sym.isTerm && !sym.isSetter) flags = flags &~ ParamAccessor // we only generate a tag for parameter setters pickleFlags(flags, sym.isTerm) val annots = sym.annotations.foreach(pickleAnnotation(sym, mdef, _)) - } - def pickleFlags(flags: FlagSet, isTerm: Boolean)(using Context): Unit = { + def pickleFlags(flags: FlagSet, isTerm: Boolean)(using Context): Unit = import Flags._ - 
def writeModTag(tag: Int) = { + def writeModTag(tag: Int) = assert(isModifierTag(tag)) writeByte(tag) - } assert(!flags.is(Scala2x)) if (flags.is(Private)) writeModTag(PRIVATE) if (flags.is(Protected)) writeModTag(PROTECTED) @@ -764,7 +689,7 @@ class TreePickler(pickler: TastyPickler) { if (flags.is(Exported)) writeModTag(EXPORTED) if (flags.is(Given)) writeModTag(GIVEN) if (flags.is(Implicit)) writeModTag(IMPLICIT) - if (isTerm) { + if (isTerm) if (flags.is(Lazy, butNot = Module)) writeModTag(LAZY) if (flags.is(AbsOverride)) { writeModTag(ABSTRACT); writeModTag(OVERRIDE) } if (flags.is(Mutable)) writeModTag(MUTABLE) @@ -776,8 +701,7 @@ class TreePickler(pickler: TastyPickler) { if (flags.is(ParamAccessor)) writeModTag(PARAMsetter) if (flags.is(SuperParamAlias)) writeModTag(PARAMalias) assert(!(flags.is(Label))) - } - else { + else if (flags.is(Sealed)) writeModTag(SEALED) if (flags.is(Abstract)) writeModTag(ABSTRACT) if (flags.is(Trait)) writeModTag(TRAIT) @@ -785,10 +709,8 @@ class TreePickler(pickler: TastyPickler) { if (flags.is(Contravariant)) writeModTag(CONTRAVARIANT) if (flags.is(Opaque)) writeModTag(OPAQUE) if (flags.is(Open)) writeModTag(OPEN) - } - } - private def isUnpicklable(owner: Symbol, ann: Annotation)(using Context) = ann match { + private def isUnpicklable(owner: Symbol, ann: Annotation)(using Context) = ann match case Annotation.Child(sym) => sym.isInaccessibleChildOf(owner) // If child annotation refers to a local class or enum value under // a different toplevel class, it is impossible to pickle a reference to it. 
@@ -796,7 +718,6 @@ class TreePickler(pickler: TastyPickler) { // See tests/pickling/i3149.scala case _ => ann.symbol == defn.BodyAnnot // inline bodies are reconstituted automatically when unpickling - } def pickleAnnotation(owner: Symbol, mdef: MemberDef, ann: Annotation)(using Context): Unit = if !isUnpicklable(owner, ann) then @@ -810,7 +731,7 @@ class TreePickler(pickler: TastyPickler) { // ---- main entry points --------------------------------------- - def pickle(trees: List[Tree])(using Context): Unit = { + def pickle(trees: List[Tree])(using Context): Unit = profile = Profile.current for tree <- trees do try @@ -827,9 +748,8 @@ class TreePickler(pickler: TastyPickler) { .map(sym => i"${sym.showLocated} (line ${sym.srcPos.line}) #${sym.id}") .toList assert(forwardSymRefs.isEmpty, i"unresolved symbols: $missing%, % when pickling ${ctx.source}") - } - def compactify(scratch: ScratchData = new ScratchData): Unit = { + def compactify(scratch: ScratchData = new ScratchData): Unit = buf.compactify(scratch) def updateMapWithDeltas(mp: MutableSymbolMap[Addr]) = @@ -847,5 +767,3 @@ class TreePickler(pickler: TastyPickler) { i += 1 updateMapWithDeltas(symRefs) - } -} diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala index 98bd7152ff37..457fb5b5d985 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala @@ -55,7 +55,7 @@ import scala.annotation.internal.sharable class TreeUnpickler(reader: TastyReader, nameAtRef: NameTable, posUnpicklerOpt: Option[PositionUnpickler], - commentUnpicklerOpt: Option[CommentUnpickler]) { + commentUnpicklerOpt: Option[CommentUnpickler]): import TreeUnpickler._ import tpd._ @@ -99,27 +99,24 @@ class TreeUnpickler(reader: TastyReader, /** Enter all toplevel classes and objects into their scopes * @param roots a set of SymDenotations that should be overwritten by 
unpickling */ - def enter(roots: Set[SymDenotation])(using Context): Unit = { + def enter(roots: Set[SymDenotation])(using Context): Unit = this.roots = roots val rdr = new TreeReader(reader).fork ownerTree = new OwnerTree(NoAddr, 0, rdr.fork, reader.endAddr) if (rdr.isTopLevel) rdr.indexStats(reader.endAddr) - } /** The unpickled trees */ - def unpickle(mode: UnpickleMode)(using Context): List[Tree] = { + def unpickle(mode: UnpickleMode)(using Context): List[Tree] = if mode != UnpickleMode.TopLevel then rootOwner = ctx.owner assert(roots != null, "unpickle without previous enterTopLevel") val rdr = new TreeReader(reader) - mode match { + mode match case UnpickleMode.TopLevel => rdr.readTopLevel() case UnpickleMode.Term => rdr.readTree() :: Nil case UnpickleMode.TypeTree => rdr.readTpt() :: Nil - } - } - class Completer(reader: TastyReader)(using @constructorOnly _ctx: Context) extends LazyType { + class Completer(reader: TastyReader)(using @constructorOnly _ctx: Context) extends LazyType: import reader._ val owner = ctx.owner val mode = ctx.mode @@ -137,27 +134,24 @@ class TreeUnpickler(reader: TastyReader, |Run with -Ydebug-unpickling to see full stack trace.""") treeAtAddr(currentAddr) = try - atPhaseBeforeTransforms { + atPhaseBeforeTransforms: new TreeReader(reader).readIndexedDef()( using ctx.withOwner(owner).withModeBits(mode).withSource(source)) - } catch case ex: AssertionError => fail(ex) case ex: Exception => fail(ex) - } - class TreeReader(val reader: TastyReader) { + class TreeReader(val reader: TastyReader): import reader._ def forkAt(start: Addr): TreeReader = new TreeReader(subReader(start, endAddr)) def fork: TreeReader = forkAt(currentAddr) - def skipTree(tag: Int): Unit = { + def skipTree(tag: Int): Unit = if (tag >= firstLengthTreeTag) goto(readEnd()) else if (tag >= firstNatASTTreeTag) { readNat(); skipTree() } else if (tag >= firstASTTreeTag) skipTree() else if (tag >= firstNatTreeTag) readNat() - } def skipTree(): Unit = skipTree(readByte()) 
def skipParams(): Unit = @@ -172,65 +166,56 @@ class TreeUnpickler(reader: TastyReader, * Template node, but need to be listed separately in the OwnerTree of the enclosing class * in order not to confuse owner chains. */ - def scanTree(buf: ListBuffer[OwnerTree], mode: MemberDefMode = AllDefs): Unit = { + def scanTree(buf: ListBuffer[OwnerTree], mode: MemberDefMode = AllDefs): Unit = val start = currentAddr val tag = readByte() - tag match { + tag match case VALDEF | DEFDEF | TYPEDEF | TYPEPARAM | PARAM | TEMPLATE => val end = readEnd() for (i <- 0 until numRefs(tag)) readNat() - if (tag == TEMPLATE) { + if (tag == TEMPLATE) // Read all member definitions now, whereas non-members are children of // template's owner tree. val nonMemberReader = fork scanTrees(buf, end, MemberDefsOnly) buf += new OwnerTree(start, tag, nonMemberReader, end) - } else if (mode != NoMemberDefs) buf += new OwnerTree(start, tag, fork, end) goto(end) case tag => if (mode == MemberDefsOnly) skipTree(tag) - else if (tag >= firstLengthTreeTag) { + else if (tag >= firstLengthTreeTag) val end = readEnd() var nrefs = numRefs(tag) - if (nrefs < 0) { + if (nrefs < 0) for (i <- nrefs until 0) scanTree(buf) goto(end) - } - else { + else for (i <- 0 until nrefs) readNat() if (tag == BIND) // a Bind is never the owner of anything, so we set `end = start` buf += new OwnerTree(start, tag, fork, end = start) scanTrees(buf, end) - } - } else if (tag >= firstNatASTTreeTag) { readNat(); scanTree(buf) } else if (tag >= firstASTTreeTag) scanTree(buf) else if (tag >= firstNatTreeTag) readNat() - } - } /** Record all directly nested definitions and templates between current address and `end` * as `OwnerTree`s in `buf` */ - def scanTrees(buf: ListBuffer[OwnerTree], end: Addr, mode: MemberDefMode = AllDefs): Unit = { + def scanTrees(buf: ListBuffer[OwnerTree], end: Addr, mode: MemberDefMode = AllDefs): Unit = while (currentAddr.index < end.index) scanTree(buf, mode) assert(currentAddr.index == end.index) - } /** 
The next tag, following through SHARED tags */ - def nextUnsharedTag: Int = { + def nextUnsharedTag: Int = val tag = nextByte - if (tag == SHAREDtype || tag == SHAREDterm) { + if (tag == SHAREDtype || tag == SHAREDterm) val lookAhead = fork lookAhead.reader.readByte() forkAt(lookAhead.reader.readAddr()).nextUnsharedTag - } else tag - } def readName(): TermName = nameAtRef(readNameRef()) @@ -241,10 +226,9 @@ class TreeUnpickler(reader: TastyReader, */ def readParamNamesAndMods(end: Addr): (List[Name], FlagSet) = val names = - collectWhile(currentAddr != end && !isModifierTag(nextByte)) { + collectWhile(currentAddr != end && !isModifierTag(nextByte)): skipTree() readName() - } var mods = EmptyFlags while currentAddr != end do // avoid boxing the mods readByte() match @@ -264,25 +248,23 @@ class TreeUnpickler(reader: TastyReader, def readSymRef()(using Context): Symbol = symbolAt(readAddr()) /** The symbol at given address; createa new one if none exists yet */ - def symbolAt(addr: Addr)(using Context): Symbol = symAtAddr.get(addr) match { + def symbolAt(addr: Addr)(using Context): Symbol = symAtAddr.get(addr) match case Some(sym) => sym case None => val sym = forkAt(addr).createSymbol()(using ctx.withOwner(ownerTree.findOwner(addr))) report.log(i"forward reference to $sym") sym - } /** The symbol defined by current definition */ - def symbolAtCurrent()(using Context): Symbol = symAtAddr.get(currentAddr) match { + def symbolAtCurrent()(using Context): Symbol = symAtAddr.get(currentAddr) match case Some(sym) => assert(ctx.owner == sym.owner, i"owner discrepancy for $sym, expected: ${ctx.owner}, found: ${sym.owner}") sym case None => createSymbol() - } - def readConstant(tag: Int)(using Context): Constant = (tag: @switch) match { + def readConstant(tag: Int)(using Context): Constant = (tag: @switch) match case UNITconst => Constant(()) case TRUEconst => @@ -309,24 +291,22 @@ class TreeUnpickler(reader: TastyReader, Constant(null) case CLASSconst => Constant(readType()) 
- } /** Read a type */ - def readType()(using Context): Type = { + def readType()(using Context): Type = val start = currentAddr val tag = readByte() pickling.println(s"reading type ${astTagToString(tag)} at $start, ${ctx.source}") - def registeringType[T](tp: Type, op: => T): T = { + def registeringType[T](tp: Type, op: => T): T = typeAtAddr(start) = tp op - } - def readLengthType(): Type = { + def readLengthType(): Type = val end = readEnd() def readMethodic[N <: Name, PInfo <: Type, LT <: LambdaType] - (companionOp: FlagSet => LambdaTypeCompanion[N, PInfo, LT], nameMap: Name => N): LT = { + (companionOp: FlagSet => LambdaTypeCompanion[N, PInfo, LT], nameMap: Name => N): LT = val result = typeAtAddr.getOrElse(start, { val nameReader = fork nameReader.skipTree() // skip result @@ -338,39 +318,35 @@ class TreeUnpickler(reader: TastyReader, }) goto(end) result.asInstanceOf[LT] - } def readVariances(tp: Type): Type = tp match case tp: HKTypeLambda if currentAddr != end => - val vs = until(end) { + val vs = until(end): readByte() match case STABLE => Invariant case COVARIANT => Covariant case CONTRAVARIANT => Contravariant - } tp.withVariances(vs) case _ => tp val result = - (tag: @switch) match { + (tag: @switch) match case TERMREFin => var sname = readName() val prefix = readType() val owner = readType() - sname match { + sname match case SignedName(name, sig, target) => TermRef(prefix, name, owner.decl(name).atSignature(sig, target).asSeenFrom(prefix)) case name => TermRef(prefix, name, owner.decl(name).asSeenFrom(prefix)) - } case TYPEREFin => val name = readName().toTypeName val prefix = readType() val space = readType() - space.decl(name) match { + space.decl(name) match case symd: SymDenotation if prefix.isArgPrefixOf(symd.symbol) => TypeRef(prefix, symd.symbol) case _ => TypeRef(prefix, name, space.decl(name).asSeenFrom(prefix)) - } case REFINEDtype => var name: Name = readName() val parent = readType() @@ -411,15 +387,12 @@ class TreeUnpickler(reader: 
TastyReader, case TYPELAMBDAtype => readMethodic(_ => HKTypeLambda, _.toTypeName) case PARAMtype => - readTypeRef() match { + readTypeRef() match case binder: LambdaType => binder.paramRefs(readNat()) - } - } assert(currentAddr == end, s"$start $currentAddr $end ${astTagToString(tag)}") result - } - def readSimpleType(): Type = (tag: @switch) match { + def readSimpleType(): Type = (tag: @switch) match case TYPEREFdirect | TERMREFdirect => NamedType(NoPrefix, readSymRef()) case TYPEREFsymbol | TERMREFsymbol => @@ -434,22 +407,20 @@ class TreeUnpickler(reader: TastyReader, case TERMREF => val sname = readName() val prefix = readType() - sname match { + sname match case SignedName(name, sig, target) => TermRef(prefix, name, prefix.member(name).atSignature(sig, target)) case name => TermRef(prefix, name) - } case THIS => ThisType.raw(readType().asInstanceOf[TypeRef]) case RECtype => - typeAtAddr.get(start) match { + typeAtAddr.get(start) match case Some(tp) => skipTree(tag) tp case None => RecType(rt => registeringType(rt, readType())) - } case RECthis => readTypeRef().asInstanceOf[RecType].recThis case SHAREDtype => @@ -460,30 +431,25 @@ class TreeUnpickler(reader: TastyReader, ExprType(if knowsPureFuns then arg else arg.adaptByNameArgUnderPureFuns) case _ => ConstantType(readConstant(tag)) - } if (tag < firstLengthTreeTag) readSimpleType() else readLengthType() - } - private def readSymNameRef()(using Context): Type = { + private def readSymNameRef()(using Context): Type = val sym = readSymRef() val prefix = readType() val res = NamedType(prefix, sym) - prefix match { + prefix match case prefix: ThisType if (prefix.cls eq sym.owner) && !sym.is(Opaque) => res.withDenot(sym.denot) // without this precaution we get an infinite cycle when unpickling pos/extmethods.scala // the problem arises when a self type of a trait is a type parameter of the same trait. 
case _ => res - } - } - private def readPackageRef()(using Context): TermSymbol = { + private def readPackageRef()(using Context): TermSymbol = val name = readName() if (name == nme.ROOT || name == nme.ROOTPKG) defn.RootPackage else if (name == nme.EMPTY_PACKAGE) defn.EmptyPackageVal else requiredPackage(name) - } def readTypeRef(): Type = typeAtAddr(readAddr()) @@ -505,7 +471,7 @@ class TreeUnpickler(reader: TastyReader, private def localContext(owner: Symbol)(using Context) = ctx.fresh.setOwner(owner) - private def normalizeFlags(tag: Int, givenFlags: FlagSet, name: Name, isAbsType: Boolean, rhsIsEmpty: Boolean)(using Context): FlagSet = { + private def normalizeFlags(tag: Int, givenFlags: FlagSet, name: Name, isAbsType: Boolean, rhsIsEmpty: Boolean)(using Context): FlagSet = val lacksDefinition = rhsIsEmpty && name.isTermName && !name.isConstructorName && !givenFlags.isOneOf(TermParamOrAccessor) || @@ -517,17 +483,14 @@ class TreeUnpickler(reader: TastyReader, flags |= (if (tag == VALDEF) ModuleValCreationFlags else ModuleClassCreationFlags) if flags.is(Enum, butNot = Method) && name.isTermName then flags |= StableRealizable - if (ctx.owner.isClass) { + if (ctx.owner.isClass) if (tag == TYPEPARAM) flags |= Param - else if (tag == PARAM) { + else if (tag == PARAM) flags |= ParamAccessor if (!rhsIsEmpty) // param alias flags |= Method - } - } else if (isParamTag(tag)) flags |= Param flags - } def isAbstractType(name: Name)(using Context): Boolean = nextByte match case SHAREDtype => @@ -555,7 +518,7 @@ class TreeUnpickler(reader: TastyReader, /** Create symbol of definition node and enter in symAtAddr map * @return the created symbol */ - def createSymbol()(using Context): Symbol = nextByte match { + def createSymbol()(using Context): Symbol = nextByte match case VALDEF | DEFDEF | TYPEDEF | TYPEPARAM | PARAM => createMemberSymbol() case BIND => @@ -566,28 +529,25 @@ class TreeUnpickler(reader: TastyReader, localDummy case tag => throw new Error(s"illegal 
createSymbol at $currentAddr, tag = $tag") - } - private def createBindSymbol()(using Context): Symbol = { + private def createBindSymbol()(using Context): Symbol = val start = currentAddr val tag = readByte() val end = readEnd() var name: Name = readName() if nextUnsharedTag == TYPEBOUNDS then name = name.toTypeName val typeReader = fork - val completer = new LazyType { + val completer = new LazyType: def complete(denot: SymDenotation)(using Context) = denot.info = typeReader.readType() - } val sym = newSymbol(ctx.owner, name, Flags.Case, completer, coord = coordAt(start)) registerSym(start, sym) sym - } /** Create symbol of member definition or parameter node and enter in symAtAddr map * @return the created symbol */ - def createMemberSymbol()(using Context): Symbol = { + def createMemberSymbol()(using Context): Symbol = val start = currentAddr val tag = readByte() val end = readEnd() @@ -609,7 +569,7 @@ class TreeUnpickler(reader: TastyReader, if (flags.is(Module)) adjustModuleCompleter(completer, name) else completer val coord = coordAt(start) val sym = - roots.find(root => (root.owner eq ctx.owner) && root.name == name) match { + roots.find(root => (root.owner eq ctx.owner) && root.name == name) match case Some(rootd) => pickling.println(i"overwriting ${rootd.symbol} # ${rootd.hashCode}") rootd.symbol.coord = coord @@ -625,14 +585,13 @@ class TreeUnpickler(reader: TastyReader, newClassSymbol(ctx.owner, name.asTypeName, flags, completer, privateWithin, coord) else newSymbol(ctx.owner, name, flags, completer, privateWithin, coord) - } val annotOwner = if sym.owner.isClass then newLocalDummy(sym.owner) else sym.owner val annots = annotFns.map(_(annotOwner)) sym.annotations = annots if sym.isOpaqueAlias then sym.setFlag(Deferred) val isScala2MacroDefinedInScala3 = flags.is(Macro, butNot = Inline) && flags.is(Erased) - ctx.owner match { + ctx.owner match case cls: ClassSymbol if !isScala2MacroDefinedInScala3 || cls == defn.StringContextClass => // Enter all members 
of classes that are not Scala 2 macros. // @@ -643,47 +602,41 @@ class TreeUnpickler(reader: TastyReader, // But dual macros will never be needed for those definitions due to their intinsic nature. cls.enter(sym) case _ => - } registerSym(start, sym) - if (isClass) { + if (isClass) if sym.owner.is(Package) && annots.exists(_.hasSymbol(defn.WithPureFunsAnnot)) then knowsPureFuns = true sym.completer.withDecls(newScope) forkAt(templateStart).indexTemplateParams()(using localContext(sym)) - } else if (sym.isInlineMethod && !sym.is(Deferred)) sym.addAnnotation(LazyBodyAnnotation { (ctx0: Context) ?=> val ctx1 = localContext(sym)(using ctx0).addMode(Mode.ReadPositions) - inContext(sourceChangeContext(Addr(0))(using ctx1)) { + inContext(sourceChangeContext(Addr(0))(using ctx1)): // avoids space leaks by not capturing the current context forkAt(rhsStart).readTree() - } }) goto(start) sym - } /** Read modifier list into triplet of flags, annotations and a privateWithin * boundary symbol. */ - def readModifiers(end: Addr)(using Context): (FlagSet, List[Symbol => Annotation], Symbol) = { + def readModifiers(end: Addr)(using Context): (FlagSet, List[Symbol => Annotation], Symbol) = var flags: FlagSet = EmptyFlags var annotFns: List[Symbol => Annotation] = Nil var privateWithin: Symbol = NoSymbol - while (currentAddr.index != end.index) { - def addFlag(flag: FlagSet) = { + while (currentAddr.index != end.index) + def addFlag(flag: FlagSet) = flags |= flag readByte() - } - nextByte match { + nextByte match case PRIVATE => addFlag(Private) case PROTECTED => addFlag(Protected) case ABSTRACT => readByte() - nextByte match { + nextByte match case OVERRIDE => addFlag(AbsOverride) case _ => flags |= Abstract - } case FINAL => addFlag(Final) case SEALED => addFlag(Sealed) case CASE => addFlag(Case) @@ -728,10 +681,7 @@ class TreeUnpickler(reader: TastyReader, annotFns = readAnnot :: annotFns case tag => assert(false, s"illegal modifier tag $tag at $currentAddr, end = $end") - } - } 
(flags, annotFns.reverse, privateWithin) - } private def readWithin(using Context): Symbol = readType().typeSymbol @@ -756,10 +706,10 @@ class TreeUnpickler(reader: TastyReader, * @return the largest subset of {NoInits, PureInterface} that a * trait owning the indexed statements can have as flags. */ - def indexStats(end: Addr)(using Context): FlagSet = { + def indexStats(end: Addr)(using Context): FlagSet = var initsFlags = NoInitsInterface while (currentAddr.index < end.index) - nextByte match { + nextByte match case VALDEF | DEFDEF | TYPEDEF | TYPEPARAM | PARAM => val sym = symbolAtCurrent() skipTree() @@ -775,48 +725,43 @@ class TreeUnpickler(reader: TastyReader, case _ => skipTree() initsFlags = EmptyFlags - } assert(currentAddr.index == end.index) initsFlags - } /** Process package with given operation `op`. The operation takes as arguments * - a `RefTree` representing the `pid` of the package, * - an end address, * - a context which has the processed package as owner */ - def processPackage[T](op: (RefTree, Addr) => Context ?=> T)(using Context): T = { + def processPackage[T](op: (RefTree, Addr) => Context ?=> T)(using Context): T = val sctx = sourceChangeContext() if (sctx `ne` ctx) return processPackage(op)(using sctx) readByte() val end = readEnd() val pid = ref(readTreeRef()).asInstanceOf[RefTree] op(pid, end)(using localContext(pid.symbol.moduleClass)) - } /** Create symbols the longest consecutive sequence of parameters with given * `tag` starting at current address. */ def indexParams(tag: Int)(using Context): Unit = - while (nextByte == tag) { + while (nextByte == tag) symbolAtCurrent() skipTree() - } /** Create symbols for all type and value parameters of template starting * at current address. 
*/ - def indexTemplateParams()(using Context): Unit = { + def indexTemplateParams()(using Context): Unit = assert(readByte() == TEMPLATE) readEnd() indexParams(TYPEPARAM) indexParams(PARAM) - } /** If definition was already read by a completer, return the previously read tree * or else read definition. */ - def readIndexedDef()(using Context): Tree = treeAtAddr.remove(currentAddr) match { + def readIndexedDef()(using Context): Tree = treeAtAddr.remove(currentAddr) match case Some(tree) => assert(tree != PoisonTree, s"Cyclic reference while unpickling definition at address ${currentAddr.index} in unit ${ctx.compilationUnit}") skipTree() @@ -827,9 +772,8 @@ class TreeUnpickler(reader: TastyReader, val tree = readNewDef() treeAtAddr.remove(start) tree - } - private def readNewDef()(using Context): Tree = { + private def readNewDef()(using Context): Tree = val sctx = sourceChangeContext() if (sctx `ne` ctx) return readNewDef()(using sctx) val start = currentAddr @@ -854,9 +798,8 @@ class TreeUnpickler(reader: TastyReader, EmptyTree else if sym.isInlineMethod && !sym.is(Deferred) then // The body of an inline method is stored in an annotation, so no need to unpickle it again - new Trees.Lazy[Tree] { + new Trees.Lazy[Tree]: def complete(using Context) = inlines.Inlines.bodyToInline(sym) - } else readLater(end, _.readTree()) @@ -876,7 +819,7 @@ class TreeUnpickler(reader: TastyReader, val name = readName() pickling.println(s"reading def of $name at $start") - val tree: MemberDef = tag match { + val tree: MemberDef = tag match case DEFDEF => val paramDefss = readParamss()(using localCtx) val tpt = readTpt()(using localCtx) @@ -894,7 +837,7 @@ class TreeUnpickler(reader: TastyReader, sym.info = tpt.tpe ValDef(tpt) case TYPEDEF | TYPEPARAM => - if (sym.isClass) { + if (sym.isClass) sym.owner.ensureCompleted() // scalacLinkedClass uses unforcedDecls. Make sure it does not miss anything. 
val companion = sym.scalacLinkedClass @@ -906,8 +849,7 @@ class TreeUnpickler(reader: TastyReader, if (companion.exists && isCodefined) sym.registerCompanion(companion) TypeDef(readTemplate(using localCtx)) - } - else { + else sym.info = TypeBounds.empty // needed to avoid cyclic references when unpickling rhs, see i3816.scala sym.setFlag(Provisional) val rhs = readTpt()(using localCtx) @@ -931,13 +873,11 @@ class TreeUnpickler(reader: TastyReader, sym.resetFlag(Provisional) TypeDef(rhs) - } case PARAM => val tpt = readTpt()(using localCtx) assert(nothingButMods(end)) sym.info = tpt.tpe ValDef(tpt) - } goto(end) setSpan(start, tree) @@ -948,17 +888,15 @@ class TreeUnpickler(reader: TastyReader, if !sym.isType && !sym.is(ParamAccessor) then sym.info = ta.avoidPrivateLeaks(sym) - if (ctx.settings.YreadComments.value) { + if (ctx.settings.YreadComments.value) assert(ctx.docCtx.isDefined, "`-Yread-docs` enabled, but no `docCtx` is set.") commentUnpicklerOpt.foreach { commentUnpickler => val comment = commentUnpickler.commentAt(start) ctx.docCtx.get.addDocstring(tree.symbol, comment) tree.setComment(comment) } - } tree.setDefTree - } /** Read enough of parent to determine its type, without reading arguments * of applications. This is necessary to make TreeUnpickler as lazy as Namer @@ -997,15 +935,14 @@ class TreeUnpickler(reader: TastyReader, * parsed in this way as InferredTypeTrees. 
*/ def readParents(withArgs: Boolean)(using Context): List[Tree] = - collectWhile(nextByte != SELFDEF && nextByte != DEFDEF) { + collectWhile(nextByte != SELFDEF && nextByte != DEFDEF): nextUnsharedTag match case APPLY | TYPEAPPLY | BLOCK => if withArgs then readTree() else InferredTypeTree().withType(readParentType()) case _ => readTpt() - } - private def readTemplate(using Context): Template = { + private def readTemplate(using Context): Template = val start = currentAddr assert(sourcePathAt(start).isEmpty) val cls = ctx.owner.asClass @@ -1021,20 +958,18 @@ class TreeUnpickler(reader: TastyReader, val vparams = readIndexedParams[ValDef](PARAM) // It's important to index the class definitions before unpickling the parents // (see the parents-cycle test for examples where this matter) - val bodyFlags = { + val bodyFlags = val bodyIndexer = fork // The first DEFDEF corresponds to the primary constructor while (bodyIndexer.reader.nextByte != DEFDEF) bodyIndexer.skipTree() bodyIndexer.indexStats(end) - } val parentReader = fork val parents = readParents(withArgs = false)(using parentCtx) val parentTypes = parents.map(_.tpe.dealias) val self = - if (nextByte == SELFDEF) { + if (nextByte == SELFDEF) readByte() untpd.ValDef(readName(), readTpt(), EmptyTree).withType(NoType) - } else EmptyValDef cls.setNoInitsFlags(parentsKind(parents), bodyFlags) cls.info = ClassInfo( @@ -1059,30 +994,25 @@ class TreeUnpickler(reader: TastyReader, setSpan(start, untpd.Template(constr, mappedParents, self, lazyStats) .withType(localDummy.termRef)) - } - def skipToplevel()(using Context): Unit= { - if (!isAtEnd && isTopLevel) { + def skipToplevel()(using Context): Unit= + if (!isAtEnd && isTopLevel) skipTree() skipToplevel() - } - } def isTopLevel(using Context): Boolean = nextByte == IMPORT || nextByte == PACKAGE - def readTopLevel()(using Context): List[Tree] = { + def readTopLevel()(using Context): List[Tree] = @tailrec def read(acc: ListBuffer[Tree]): List[Tree] = - if (isTopLevel) { + 
if (isTopLevel) acc += readIndexedStat(NoSymbol) if (!isAtEnd) read(acc) else acc.toList - } else // top-level trees which are not imports or packages are not part of tree acc.toList read(new ListBuffer[tpd.Tree]) - } - def readIndexedStat(exprOwner: Symbol)(using Context): Tree = nextByte match { + def readIndexedStat(exprOwner: Symbol)(using Context): Tree = nextByte match case TYPEDEF | VALDEF | DEFDEF => readIndexedDef() case IMPORT => @@ -1096,17 +1026,15 @@ class TreeUnpickler(reader: TastyReader, } case _ => readTree()(using ctx.withOwner(exprOwner)) - } inline def readImportOrExport(inline mkTree: - (Tree, List[untpd.ImportSelector]) => Tree)()(using Context): Tree = { + (Tree, List[untpd.ImportSelector]) => Tree)()(using Context): Tree = val start = currentAddr assert(sourcePathAt(start).isEmpty) readByte() readEnd() val expr = readTree() setSpan(start, mkTree(expr, readSelectors())) - } def readSelectors()(using Context): List[untpd.ImportSelector] = if nextByte == IMPORTED then @@ -1144,10 +1072,9 @@ class TreeUnpickler(reader: TastyReader, assert(currentAddr.index == end.index) k(buf.toList, curCtx) - def readStats[T](exprOwner: Symbol, end: Addr, k: (List[Tree], Context) => T = sameTrees)(using Context): T = { + def readStats[T](exprOwner: Symbol, end: Addr, k: (List[Tree], Context) => T = sameTrees)(using Context): T = fork.indexStats(end) readIndexedStats(exprOwner, end, k) - } private def sameTrees(xs: List[Tree], ctx: Context) = xs @@ -1162,22 +1089,20 @@ class TreeUnpickler(reader: TastyReader, // ------ Reading trees ----------------------------------------------------- - def readTree()(using Context): Tree = { + def readTree()(using Context): Tree = val sctx = sourceChangeContext() if (sctx `ne` ctx) return readTree()(using sctx) val start = currentAddr val tag = readByte() pickling.println(s"reading term ${astTagToString(tag)} at $start, ${ctx.source}") - def readPathTree(): Tree = { + def readPathTree(): Tree = goto(start) - readType() match { 
+ readType() match case path: TypeRef => TypeTree(path) case path: TermRef => ref(path) case path: ThisType => untpd.This(untpd.EmptyTypeIdent).withType(path) case path: ConstantType => Literal(path.value) - } - } def makeSelect(qual: Tree, name: Name, denot: Denotation): Select = var qualType = qual.tpe.widenIfUnstable @@ -1197,14 +1122,13 @@ class TreeUnpickler(reader: TastyReader, val qual = readTree().asInstanceOf[untpd.Ident] (untpd.Ident(qual.name).withSpan(qual.span), qual.tpe.asInstanceOf[TypeRef]) - def accessibleDenot(qualType: Type, name: Name, sig: Signature, target: Name) = { + def accessibleDenot(qualType: Type, name: Name, sig: Signature, target: Name) = val pre = ctx.typeAssigner.maybeSkolemizePrefix(qualType, name) val d = qualType.findMember(name, pre).atSignature(sig, target) if (!d.symbol.exists || d.symbol.isAccessibleFrom(pre)) d else qualType.findMember(name, pre, excluded = Private).atSignature(sig, target) - } - def readSimpleTree(): Tree = tag match { + def readSimpleTree(): Tree = tag match case SHAREDterm => forkAt(readAddr()).readTree() case IDENT => @@ -1212,10 +1136,9 @@ class TreeUnpickler(reader: TastyReader, case IDENTtpt => untpd.Ident(readName().toTypeName).withType(readType()) case SELECT => - readName() match { + readName() match case SignedName(name, sig, target) => completeSelect(name, sig, target) case name => completeSelect(name, Signature.NotAMethod, EmptyTermName) - } case SELECTtpt => val name = readName().toTypeName completeSelect(name, Signature.NotAMethod, EmptyTermName) @@ -1235,7 +1158,6 @@ class TreeUnpickler(reader: TastyReader, NamedArg(readName(), readTree()) case _ => readPathTree() - } /** Adapt constructor calls where class has only using clauses from old to new scheme. * or class has mixed using clauses and other clauses. 
@@ -1286,10 +1208,10 @@ class TreeUnpickler(reader: TastyReader, tree.overwriteType(tree.tpe.simplified) tree - def readLengthTree(): Tree = { + def readLengthTree(): Tree = val end = readEnd() val result = - (tag: @switch) match { + (tag: @switch) match case SUPER => val qual = readTree() val (mixId, mixTpe) = ifBefore(end)(readQualId(), (untpd.EmptyTypeIdent, NoType)) @@ -1324,19 +1246,17 @@ class TreeUnpickler(reader: TastyReader, case INLINED => val exprReader = fork skipTree() - def maybeCall = nextUnsharedTag match { + def maybeCall = nextUnsharedTag match case VALDEF | DEFDEF => EmptyTree case _ => readTree() - } val call = ifBefore(end)(maybeCall, EmptyTree) val bindings = readStats(ctx.owner, end).asInstanceOf[List[ValOrDefDef]] val expansion = exprReader.readTree() // need bindings in scope, so needs to be read before Inlined(call, bindings, expansion) case IF => - if (nextByte == INLINE) { + if (nextByte == INLINE) readByte() InlineIf(readTree(), readTree(), readTree()) - } else If(readTree(), readTree(), readTree()) case LAMBDA => @@ -1413,10 +1333,9 @@ class TreeUnpickler(reader: TastyReader, case UNAPPLY => val fn = readTree() val implicitArgs = - collectWhile(nextByte == IMPLICITarg) { + collectWhile(nextByte == IMPLICITarg): readByte() readTree() - } val patType = readType() val argPats = until(end)(readTree()) UnApply(fn, implicitArgs, argPats, patType) @@ -1460,20 +1379,17 @@ class TreeUnpickler(reader: TastyReader, Hole(true, idx, args, EmptyTree, tpe) case _ => readPathTree() - } assert(currentAddr == end, s"$start $currentAddr $end ${astTagToString(tag)}") result - } val tree = if (tag < firstLengthTreeTag) readSimpleTree() else readLengthTree() setSpan(start, tree) - } - def readTpt()(using Context): Tree = { + def readTpt()(using Context): Tree = val sctx = sourceChangeContext() if (sctx `ne` ctx) return readTpt()(using sctx) val start = currentAddr - val tree = nextByte match { + val tree = nextByte match case SHAREDterm => readByte() 
forkAt(readAddr()).readTpt() @@ -1494,25 +1410,20 @@ class TreeUnpickler(reader: TastyReader, Hole(false, idx, args, EmptyTree, tpe) case _ => if (isTypeTreeTag(nextByte)) readTree() - else { + else val start = currentAddr val tp = readType() if (tp.exists) setSpan(start, TypeTree(tp)) else EmptyTree - } - } setSpan(start, tree) - } def readCases(end: Addr)(using Context): List[CaseDef] = - collectWhile((nextUnsharedTag == CASEDEF) && currentAddr != end) { - if (nextByte == SHAREDterm) { + collectWhile((nextUnsharedTag == CASEDEF) && currentAddr != end): + if (nextByte == SHAREDterm) readByte() forkAt(readAddr()).readCase()(using ctx.fresh.setNewScope) - } else readCase()(using ctx.fresh.setNewScope) - } - def readCase()(using Context): CaseDef = { + def readCase()(using Context): CaseDef = val sctx = sourceChangeContext() if (sctx `ne` ctx) return readCase()(using sctx) val start = currentAddr @@ -1522,58 +1433,53 @@ class TreeUnpickler(reader: TastyReader, val rhs = readTree() val guard = ifBefore(end)(readTree(), EmptyTree) setSpan(start, CaseDef(pat, guard, rhs)) - } def readLater[T <: AnyRef](end: Addr, op: TreeReader => Context ?=> T)(using Context): Trees.Lazy[T] = readLaterWithOwner(end, op)(ctx.owner) - def readLaterWithOwner[T <: AnyRef](end: Addr, op: TreeReader => Context ?=> T)(using Context): Symbol => Trees.Lazy[T] = { + def readLaterWithOwner[T <: AnyRef](end: Addr, op: TreeReader => Context ?=> T)(using Context): Symbol => Trees.Lazy[T] = val localReader = fork goto(end) val mode = ctx.mode val source = ctx.source owner => new LazyReader(localReader, owner, mode, source, op) - } // ------ Setting positions ------------------------------------------------ /** Pickled span for `addr`. */ def spanAt(addr: Addr)(using Context): Span = if (ctx.mode.is(Mode.ReadPositions)) - posUnpicklerOpt match { + posUnpicklerOpt match case Some(posUnpickler) => posUnpickler.spanAt(addr) case _ => NoSpan - } else NoSpan /** Coordinate for the symbol at `addr`. 
*/ - def coordAt(addr: Addr)(using Context): Coord = { + def coordAt(addr: Addr)(using Context): Coord = val span = spanAt(addr) if (span.exists) spanCoord(span) else indexCoord(addr.index) - } /** Pickled source path at `addr`. */ def sourcePathAt(addr: Addr)(using Context): String = if (ctx.mode.is(Mode.ReadPositions)) - posUnpicklerOpt match { + posUnpicklerOpt match case Some(posUnpickler) => posUnpickler.sourcePathAt(addr) case _ => "" - } else "" /** If currentAddr carries a source path, the current context with * the source of that path, otherwise the current context itself. */ - def sourceChangeContext(addr: Addr = currentAddr)(using Context): Context = { + def sourceChangeContext(addr: Addr = currentAddr)(using Context): Context = val path = sourcePathAt(addr) - if (path.nonEmpty) { + if (path.nonEmpty) val sourceFile = ctx.getSource(path) posUnpicklerOpt match case Some(posUnpickler) if !sourceFile.initialized => @@ -1581,31 +1487,24 @@ class TreeUnpickler(reader: TastyReader, case _ => pickling.println(i"source change at $addr: $path") ctx.withSource(sourceFile) - } else ctx - } /** Set position of `tree` at given `addr`. */ - def setSpan[T <: untpd.Tree](addr: Addr, tree: T)(using Context): tree.type = { + def setSpan[T <: untpd.Tree](addr: Addr, tree: T)(using Context): tree.type = val span = spanAt(addr) if (span.exists) tree.span = span tree - } - } class LazyReader[T <: AnyRef]( reader: TreeReader, owner: Symbol, mode: Mode, source: SourceFile, - op: TreeReader => Context ?=> T) extends Trees.Lazy[T] { - def complete(using Context): T = { + op: TreeReader => Context ?=> T) extends Trees.Lazy[T]: + def complete(using Context): T = pickling.println(i"starting to read at ${reader.reader.currentAddr} with owner $owner") - atPhaseBeforeTransforms { + atPhaseBeforeTransforms: op(reader)(using ctx .withOwner(owner) .withModeBits(mode) .withSource(source)) - } - } - } /** A lazy datastructure that records how definitions are nested in TASTY data. 
* The structure is lazy because it needs to be computed only for forward references @@ -1619,55 +1518,46 @@ class TreeUnpickler(reader: TastyReader, * @param reader The reader to be used for scanning for children * @param end The end of the owning definition */ - class OwnerTree(val addr: Addr, tag: Int, reader: TreeReader, val end: Addr) { + class OwnerTree(val addr: Addr, tag: Int, reader: TreeReader, val end: Addr): private var myChildren: List[OwnerTree] = null /** All definitions that have the definition at `addr` as closest enclosing definition */ - def children: List[OwnerTree] = { - if (myChildren == null) myChildren = { + def children: List[OwnerTree] = + if (myChildren == null) myChildren = val buf = new ListBuffer[OwnerTree] reader.scanTrees(buf, end, if (tag == TEMPLATE) NoMemberDefs else AllDefs) buf.toList - } myChildren - } /** Find the owner of definition at `addr` */ - def findOwner(addr: Addr)(using Context): Symbol = { + def findOwner(addr: Addr)(using Context): Symbol = def search(cs: List[OwnerTree], current: Symbol): Symbol = - try cs match { + try cs match case ot :: cs1 => - if (ot.addr.index == addr.index) { + if (ot.addr.index == addr.index) assert(current.exists, i"no symbol at $addr") current - } else if (ot.addr.index < addr.index && addr.index < ot.end.index) search(ot.children, reader.symbolAt(ot.addr)) else search(cs1, current) case Nil => throw new TreeWithoutOwner - } - catch { + catch case ex: TreeWithoutOwner => pickling.println(i"no owner for $addr among $cs%, %") throw ex - } try search(children, rootOwner) - catch { + catch case ex: TreeWithoutOwner => pickling.println(s"ownerTree = $ownerTree") throw ex - } - } override def toString: String = s"OwnerTree(${addr.index}, ${end.index}, ${if (myChildren == null) "?" 
else myChildren.mkString(" ")})" - } -} -object TreeUnpickler { +object TreeUnpickler: /** Define the expected format of the tasty bytes * - TopLevel: Tasty that contains a full class nested in its package @@ -1675,14 +1565,13 @@ object TreeUnpickler { * - TypeTree: Tasty that contains only a type tree or a reference to a type */ sealed trait UnpickleMode - object UnpickleMode { + object UnpickleMode: /** Unpickle a full class in some package */ object TopLevel extends UnpickleMode /** Unpickle as a TermTree */ object Term extends UnpickleMode /** Unpickle as a TypeTree */ object TypeTree extends UnpickleMode - } /** A marker value used to detect cyclic reference while unpickling definitions. */ @sharable val PoisonTree: tpd.Tree = new EmptyTree @@ -1704,4 +1593,3 @@ object TreeUnpickler { * call that has otherwise only using clauses was inserted. */ val InsertedApplyToNone: Property.Key[Unit] = Property.Key() -} diff --git a/compiler/src/dotty/tools/dotc/core/unpickleScala2/PickleBuffer.scala b/compiler/src/dotty/tools/dotc/core/unpickleScala2/PickleBuffer.scala index 2aeb1bdeefcc..053ace0ac754 100644 --- a/compiler/src/dotty/tools/dotc/core/unpickleScala2/PickleBuffer.scala +++ b/compiler/src/dotty/tools/dotc/core/unpickleScala2/PickleBuffer.scala @@ -11,18 +11,17 @@ import Flags._ * @param from The first index where defined data are found * @param to The first index where new data can be written */ -class PickleBuffer(data: Array[Byte], from: Int, to: Int) { +class PickleBuffer(data: Array[Byte], from: Int, to: Int): var bytes: Array[Byte] = data var readIndex: Int = from var writeIndex: Int = to /** Double bytes array */ - private def dble(): Unit = { + private def dble(): Unit = val bytes1 = new Array[Byte](bytes.length * 2) System.arraycopy(bytes, 0, bytes1, 0, writeIndex) bytes = bytes1 - } def ensureCapacity(capacity: Int): Unit = while (bytes.length < writeIndex + capacity) dble() @@ -30,11 +29,10 @@ class PickleBuffer(data: Array[Byte], from: Int, to: 
Int) { // -- Basic output routines -------------------------------------------- /** Write a byte of data */ - def writeByte(b: Int): Unit = { + def writeByte(b: Int): Unit = if (writeIndex == bytes.length) dble() bytes(writeIndex) = b.toByte writeIndex += 1 - } /** Write a natural number in big endian format, base 128. * All but the last digits have bit 0x80 set. @@ -49,16 +47,14 @@ class PickleBuffer(data: Array[Byte], from: Int, to: Int) { * if the long value is in the range Int.MIN_VALUE to * Int.MAX_VALUE. */ - def writeLongNat(x: Long): Unit = { - def writeNatPrefix(x: Long): Unit = { + def writeLongNat(x: Long): Unit = + def writeNatPrefix(x: Long): Unit = val y = x >>> 7 if (y != 0L) writeNatPrefix(y) writeByte(((x & 0x7f) | 0x80).toInt) - } val y = x >>> 7 if (y != 0L) writeNatPrefix(y) writeByte((x & 0x7f).toInt) - } /** Write a natural number x at position pos. * If number is more than one byte, shift rest of array to make space. @@ -66,29 +62,26 @@ class PickleBuffer(data: Array[Byte], from: Int, to: Int) { * @param pos ... * @param x ... */ - def patchNat(pos: Int, x: Int): Unit = { - def patchNatPrefix(x: Int): Unit = { + def patchNat(pos: Int, x: Int): Unit = + def patchNatPrefix(x: Int): Unit = writeByte(0) System.arraycopy(bytes, pos, bytes, pos + 1, writeIndex - (pos + 1)) bytes(pos) = ((x & 0x7f) | 0x80).toByte val y = x >>> 7 if (y != 0) patchNatPrefix(y) - } bytes(pos) = (x & 0x7f).toByte val y = x >>> 7 if (y != 0) patchNatPrefix(y) - } /** Write a long number x in signed big endian format, base 256. * * @param x The long number to be written. 
*/ - def writeLong(x: Long): Unit = { + def writeLong(x: Long): Unit = val y = x >> 8 val z = x & 0xff if (-y != (z >> 7)) writeLong(y) writeByte(z.toInt) - } // -- Basic input routines -------------------------------------------- @@ -96,15 +89,14 @@ class PickleBuffer(data: Array[Byte], from: Int, to: Int) { def peekByte(): Int = bytes(readIndex) /** Read a byte */ - def readByte(): Int = { + def readByte(): Int = val x = bytes(readIndex); readIndex += 1; x - } /** Read a natural number in big endian format, base 128. * All but the last digits have bit 0x80 set.*/ def readNat(): Int = readLongNat().toInt - def readLongNat(): Long = { + def readLongNat(): Long = var b = 0L var x = 0L while ({ @@ -114,25 +106,22 @@ class PickleBuffer(data: Array[Byte], from: Int, to: Int) { }) () x - } /** Read a long number in signed big endian format, base 256. */ - def readLong(len: Int): Long = { + def readLong(len: Int): Long = var x = 0L var i = 0 - while (i < len) { + while (i < len) x = (x << 8) + (readByte() & 0xff) i += 1 - } val leading = 64 - (len << 3) x << leading >> leading - } /** Returns the buffer as a sequence of (Int, Array[Byte]) representing * (tag, data) of the individual entries. Saves and restores buffer state. */ - def toIndexedSeq: IndexedSeq[(Int, Array[Byte])] = { + def toIndexedSeq: IndexedSeq[(Int, Array[Byte])] = val saved = readIndex readIndex = 0 readNat() ; readNat() // discarding version @@ -149,7 +138,6 @@ class PickleBuffer(data: Array[Byte], from: Int, to: Int) { readIndex = saved result.toIndexedSeq - } /** Perform operation op until the condition * readIndex == end is satisfied. @@ -176,25 +164,22 @@ class PickleBuffer(data: Array[Byte], from: Int, to: Int) { * @return an array mapping entry numbers to locations in * the byte array where the entries start. 
*/ - def createIndex: Array[Int] = { + def createIndex: Array[Int] = val index = new Array[Int](readNat()) // nbEntries_Nat - for (i <- 0 until index.length) { + for (i <- 0 until index.length) index(i) = readIndex readByte() // skip type_Nat readIndex = readNat() + readIndex // read length_Nat, jump to next entry - } index - } -} -object PickleBuffer { +object PickleBuffer: private inline val ScalaFlagEnd = 48 private inline val ChunkBits = 8 private inline val ChunkSize = 1 << ChunkBits private type FlagMap = Array[Array[Long]] - private val (scalaTermFlagMap, scalaTypeFlagMap) = { + private val (scalaTermFlagMap, scalaTypeFlagMap) = import Scala2Flags._ val corr = Map( @@ -240,7 +225,7 @@ object PickleBuffer { // generate initial maps from Scala flags to Dotty flags val termMap, typeMap = new Array[Long](64) for (idx <- 0 until ScalaFlagEnd) - corr get (1L << idx) match { + corr get (1L << idx) match case Some((termFlag: FlagSet @unchecked, typeFlag: FlagSet @unchecked)) => termMap(idx) |= termFlag.bits typeMap(idx) |= typeFlag.bits @@ -248,34 +233,28 @@ object PickleBuffer { termMap(idx) |= commonFlag.toTermFlags.bits typeMap(idx) |= commonFlag.toTypeFlags.bits case _ => - } // Convert map so that it maps chunks of ChunkBits size at once // instead of single bits. 
- def chunkMap(xs: Array[Long]): FlagMap = { + def chunkMap(xs: Array[Long]): FlagMap = val size = (xs.length + ChunkBits - 1) / ChunkBits val chunked = Array.ofDim[Long](size, ChunkSize) var i = 0 - while (i < size) { + while (i < size) var j = 0 - while (j < ChunkSize) { + while (j < ChunkSize) var k = 0 - while (k < ChunkBits) { + while (k < ChunkBits) if ((j & (1 << k)) != 0) chunked(i)(j) |= xs(i * ChunkBits + k) k += 1 - } j += 1 - } i += 1 - } chunked - } (chunkMap(termMap), chunkMap(typeMap)) - } - def unpickleScalaFlags(sflags: Long, isType: Boolean): FlagSet = { + def unpickleScalaFlags(sflags: Long, isType: Boolean): FlagSet = val map: FlagMap = if (isType) scalaTypeFlagMap else scalaTermFlagMap val shift = ChunkBits val mask = ChunkSize - 1 @@ -288,5 +267,3 @@ object PickleBuffer { map(4)((sflags >>> (shift * 4)).toInt & mask) | map(5)((sflags >>> (shift * 5)).toInt & mask) ) - } -} diff --git a/compiler/src/dotty/tools/dotc/core/unpickleScala2/PickleFormat.scala b/compiler/src/dotty/tools/dotc/core/unpickleScala2/PickleFormat.scala index f135de7e43e9..3dcd2b5d0c15 100644 --- a/compiler/src/dotty/tools/dotc/core/unpickleScala2/PickleFormat.scala +++ b/compiler/src/dotty/tools/dotc/core/unpickleScala2/PickleFormat.scala @@ -13,7 +13,7 @@ package unpickleScala2 * @author Martin Odersky * @version 1.0 */ -object PickleFormat { +object PickleFormat: /*************************************************** * Symbol table attribute format: @@ -218,7 +218,6 @@ object PickleFormat { inline val firstSymTag = NONEsym inline val lastSymTag = VALsym inline val lastExtSymTag = EXTMODCLASSref -} //The following two are no longer accurate, because ANNOTATEDtpe, diff --git a/compiler/src/dotty/tools/dotc/core/unpickleScala2/Scala2Erasure.scala b/compiler/src/dotty/tools/dotc/core/unpickleScala2/Scala2Erasure.scala index cc2d7dd7ee56..df1fbc1b3adb 100644 --- a/compiler/src/dotty/tools/dotc/core/unpickleScala2/Scala2Erasure.scala +++ 
b/compiler/src/dotty/tools/dotc/core/unpickleScala2/Scala2Erasure.scala @@ -180,19 +180,19 @@ object Scala2Erasure: !that.isInstanceOf[Scala2RefinedType] && psym.match case sym1: Symbol => that match - case sym2: Symbol => - if sym1.isClass && sym2.isClass then - sym1.derivesFrom(sym2) - else if !sym1.isClass then - goUpperBound(sym1) - else + case sym2: Symbol => + if sym1.isClass && sym2.isClass then + sym1.derivesFrom(sym2) + else if !sym1.isClass then + goUpperBound(sym1) + else // sym2 is an abstract type, return false because // `isNonBottomSubClass` in Scala 2 never considers a class C to // be a a sub of an abstract type T, even if it was declared as // `type T >: C`. - false - case _ => - goUpperBound(sym1) + false + case _ => + goUpperBound(sym1) case tp1: StructuralRef => goUpperBound(tp1) case tp1: RefinedType => @@ -222,10 +222,10 @@ object Scala2Erasure: */ def intersectionDominator(parents: List[Type])(using Context): Type = val psyms = parents.map(pseudoSymbol) - if (psyms.contains(defn.ArrayClass)) { + if (psyms.contains(defn.ArrayClass)) defn.ArrayOf( intersectionDominator(parents.collect { case defn.ArrayOf(arg) => arg })) - } else { + else def isUnshadowed(psym: PseudoSymbol) = !(psyms.exists(qsym => !psym.sameSymbol(qsym) && qsym.isNonBottomSubClass(psym))) val cs = parents.iterator.filter { p => @@ -233,7 +233,6 @@ object Scala2Erasure: psym.isClass && !psym.isTrait && isUnshadowed(psym) } (if (cs.hasNext) cs else parents.iterator.filter(p => isUnshadowed(pseudoSymbol(p)))).next() - } /** A flattened list of parents of this intersection. 
* diff --git a/compiler/src/dotty/tools/dotc/core/unpickleScala2/Scala2Flags.scala b/compiler/src/dotty/tools/dotc/core/unpickleScala2/Scala2Flags.scala index 6cf08b3384d9..266f79b0bace 100644 --- a/compiler/src/dotty/tools/dotc/core/unpickleScala2/Scala2Flags.scala +++ b/compiler/src/dotty/tools/dotc/core/unpickleScala2/Scala2Flags.scala @@ -11,7 +11,7 @@ package unpickleScala2 /** Scala2 flags, adapted from https://github.com/scala/scala/blob/2.11.x/src/reflect/scala/reflect/internal/Flags.scala */ -object Scala2Flags { +object Scala2Flags: inline val IMPLICIT = 1 << 9 inline val FINAL = 1 << 5 // May not be overridden. Note that java final implies much more than scala final. inline val PRIVATE = 1 << 2 @@ -104,4 +104,3 @@ object Scala2Flags { inline val METHOD_PKL = (1 << 9) inline val MODULE_PKL = (1 << 10) inline val INTERFACE_PKL = (1 << 11) -} diff --git a/compiler/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala b/compiler/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala index 4db5158c0e7f..652e1767ad65 100644 --- a/compiler/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala +++ b/compiler/src/dotty/tools/dotc/core/unpickleScala2/Scala2Unpickler.scala @@ -35,21 +35,20 @@ import scala.annotation.switch import reporting._ import cc.{adaptFunctionTypeUnderPureFuns, adaptByNameArgUnderPureFuns} -object Scala2Unpickler { +object Scala2Unpickler: /** Exception thrown if classfile is corrupted */ class BadSignature(msg: String) extends RuntimeException(msg) - case class TempPolyType(tparams: List[TypeSymbol], tpe: Type) extends UncachedGroundType { + case class TempPolyType(tparams: List[TypeSymbol], tpe: Type) extends UncachedGroundType: override def fallbackToText(printer: Printer): Text = "[" ~ printer.dclsText(tparams, ", ") ~ "]" ~ printer.toText(tpe) - } /** Temporary type for classinfos, will be decomposed on completion of the class */ case class TempClassInfoType(parentTypes: List[Type], decls: Scope, 
clazz: Symbol) extends UncachedGroundType /** Convert temp poly type to poly type and leave other types alone. */ - def translateTempPoly(tp: Type)(using Context): Type = tp match { + def translateTempPoly(tp: Type)(using Context): Type = tp match case TempPolyType(tparams, restpe) => // This check used to read `owner.isTerm` but that wasn't always correct, // I'm not sure `owner.is(Method)` is 100% correct either but it seems to @@ -58,15 +57,13 @@ object Scala2Unpickler { (if (tparams.head.owner.is(Method)) PolyType else HKTypeLambda) .fromParams(tparams, restpe) case tp => tp - } - def addConstructorTypeParams(denot: SymDenotation)(using Context): Unit = { + def addConstructorTypeParams(denot: SymDenotation)(using Context): Unit = assert(denot.isConstructor) denot.info = PolyType.fromParams(denot.owner.typeParams, denot.info) - } - def ensureConstructor(cls: ClassSymbol, clsDenot: ClassDenotation, scope: Scope)(using Context): Unit = { - if (scope.lookup(nme.CONSTRUCTOR) == NoSymbol) { + def ensureConstructor(cls: ClassSymbol, clsDenot: ClassDenotation, scope: Scope)(using Context): Unit = + if (scope.lookup(nme.CONSTRUCTOR) == NoSymbol) val constr = newDefaultConstructor(cls) // Scala 2 traits have a constructor iff they have initialization code // In dotc we represent that as !StableRealizable, which is also owner.is(NoInits) @@ -75,10 +72,8 @@ object Scala2Unpickler { clsDenot.setFlag(NoInits) addConstructorTypeParams(constr) cls.enter(constr, scope) - } - } - def setClassInfo(denot: ClassDenotation, info: Type, fromScala2: Boolean, selfInfo: Type = NoType)(using Context): Unit = { + def setClassInfo(denot: ClassDenotation, info: Type, fromScala2: Boolean, selfInfo: Type = NoType)(using Context): Unit = val cls = denot.classSymbol val (tparams, TempClassInfoType(parents, decls, clazz)) = info match { case TempPolyType(tps, cinfo) => (tps, cinfo) @@ -86,7 +81,7 @@ object Scala2Unpickler { }: @unchecked val ost = if (selfInfo eq NoType) && denot.is(ModuleClass) 
then - val sourceModule = denot.sourceModule.orElse { + val sourceModule = denot.sourceModule.orElse: // For non-toplevel modules, `sourceModule` won't be set when completing // the module class, we need to go find it ourselves. val modName = cls.name.sourceModuleName @@ -94,7 +89,6 @@ object Scala2Unpickler { if cls.privateWithin.exists && cls.owner.is(Trait) then modName.expandedName(cls.owner) else EmptyTermName NamerOps.findModuleBuddy(modName, denot.owner.info.decls, alternate) - } denot.owner.thisType.select(sourceModule) else selfInfo val tempInfo = new TempClassInfo(denot.owner.thisType, cls, decls, ost) @@ -105,19 +99,17 @@ object Scala2Unpickler { val normalizedParents = if (fromScala2) defn.adjustForTuple(cls, tparams, parents1) else defn.adjustForBoxedUnit(cls, parents1) - for (tparam <- tparams) { + for (tparam <- tparams) val tsym = decls.lookup(tparam.name) if (tsym.exists) tsym.setFlag(TypeParam) else denot.enter(tparam, decls) - } if (!denot.flagsUNSAFE.isAllOf(JavaModule)) ensureConstructor(cls, denot, decls) val scalacCompanion = denot.classSymbol.scalacLinkedClass - def registerCompanionPair(module: Symbol, claz: Symbol) = { + def registerCompanionPair(module: Symbol, claz: Symbol) = module.registerCompanion(claz) claz.registerCompanion(module) - } if (denot.flagsUNSAFE.is(Module)) registerCompanionPair(denot.classSymbol, scalacCompanion) @@ -127,8 +119,6 @@ object Scala2Unpickler { denot.info = tempInfo.finalized(normalizedParents) denot.ensureTypeParamsInCorrectOrder() defn.patchStdLibClass(denot) - } -} /** Unpickle symbol table information descending from a class and/or module root * from an array of bytes. 
@@ -137,7 +127,7 @@ object Scala2Unpickler { * @param moduleClassRoot the top-level module class which is unpickled, or NoSymbol if inapplicable */ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClassRoot: ClassDenotation)(ictx: Context) - extends PickleBuffer(bytes, 0, -1) with ClassfileParser.Embedded { + extends PickleBuffer(bytes, 0, -1) with ClassfileParser.Embedded: def showPickled(): Unit = atReadPos(0, () => { @@ -169,67 +159,55 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas /** A mapping from method types to the parameters used in constructing them */ private val paramsOfMethodType = new java.util.IdentityHashMap[MethodType, List[Symbol]] - protected def errorBadSignature(msg: String, original: Option[RuntimeException] = None)(using Context): Nothing = { + protected def errorBadSignature(msg: String, original: Option[RuntimeException] = None)(using Context): Nothing = val ex = new BadSignature( i"""error reading Scala signature of $classRoot from $source: |error occurred at position $readIndex: $msg""") if (ctx.settings.YdebugMissingRefs.value) original.getOrElse(ex).printStackTrace() throw ex - } - protected def handleRuntimeException(ex: RuntimeException)(using Context): Nothing = ex match { + protected def handleRuntimeException(ex: RuntimeException)(using Context): Nothing = ex match case ex: BadSignature => throw ex case _ => errorBadSignature(s"a runtime exception occurred: $ex", Some(ex)) - } def run()(using Context): Unit = - try { + try var i = 0 - while (i < index.length) { - if (entries(i) == null && isSymbolEntry(i)) { + while (i < index.length) + if (entries(i) == null && isSymbolEntry(i)) val savedIndex = readIndex readIndex = index(i) val sym = readSymbol() if sym.exists then entries(i) = sym - sym.infoOrCompleter match { + sym.infoOrCompleter match case info: ClassUnpickler => info.init() case _ => - } readIndex = savedIndex - } i += 1 - } // read children last, fix for #3951 
i = 0 - while (i < index.length) { - if (entries(i) == null) { - if (isSymbolAnnotationEntry(i)) { + while (i < index.length) + if (entries(i) == null) + if (isSymbolAnnotationEntry(i)) val savedIndex = readIndex readIndex = index(i) readSymbolAnnotation() readIndex = savedIndex - } - else if (isChildrenEntry(i)) { + else if (isChildrenEntry(i)) val savedIndex = readIndex readIndex = index(i) readChildren() readIndex = savedIndex - } - } i += 1 - } - } - catch { + catch case ex: RuntimeException => handleRuntimeException(ex) - } - def source(using Context): AbstractFile = { + def source(using Context): AbstractFile = val f = classRoot.symbol.associatedFile if (f != null) f else moduleClassRoot.symbol.associatedFile - } - private def checkVersion(using Context): Unit = { + private def checkVersion(using Context): Unit = val major = readNat() val minor = readNat() if (major != MajorVersion || minor > MinorVersion) @@ -238,7 +216,6 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas MajorVersion + "." + MinorVersion + "\n found: " + major + "." 
+ minor + " in " + source) - } private def checkScala2Stdlib(using Context): Unit = assert(!ctx.settings.Yscala2Stdlib.value, "No Scala 2 libraries should be unpickled under -Yscala2-stdlib") @@ -247,40 +224,35 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas protected def symScope(sym: Symbol): Scope = symScopes.getOrElseUpdate(sym, newScope(0)) /** Does entry represent an (internal) symbol */ - protected def isSymbolEntry(i: Int)(using Context): Boolean = { + protected def isSymbolEntry(i: Int)(using Context): Boolean = val tag = bytes(index(i)).toInt (firstSymTag <= tag && tag <= lastSymTag && (tag != CLASSsym || !isRefinementSymbolEntry(i))) - } /** Does entry represent an (internal or external) symbol */ - protected def isSymbolRef(i: Int): Boolean = { + protected def isSymbolRef(i: Int): Boolean = val tag = bytes(index(i)) (firstSymTag <= tag && tag <= lastExtSymTag) - } /** Does entry represent a name? */ - protected def isNameEntry(i: Int): Boolean = { + protected def isNameEntry(i: Int): Boolean = val tag = bytes(index(i)).toInt tag == TERMname || tag == TYPEname - } /** Does entry represent a symbol annotation? */ - protected def isSymbolAnnotationEntry(i: Int): Boolean = { + protected def isSymbolAnnotationEntry(i: Int): Boolean = val tag = bytes(index(i)).toInt tag == SYMANNOT - } /** Does the entry represent children of a symbol? */ - protected def isChildrenEntry(i: Int): Boolean = { + protected def isChildrenEntry(i: Int): Boolean = val tag = bytes(index(i)).toInt tag == CHILDREN - } /** Does entry represent a refinement symbol? 
* pre: Entry is a class symbol */ - protected def isRefinementSymbolEntry(i: Int)(using Context): Boolean = { + protected def isRefinementSymbolEntry(i: Int)(using Context): Boolean = val savedIndex = readIndex readIndex = index(i) val tag = readByte().toInt @@ -290,49 +262,42 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas val result = readNameRef() == tpnme.REFINE_CLASS readIndex = savedIndex result - } protected def isRefinementClass(sym: Symbol)(using Context): Boolean = sym.name == tpnme.REFINE_CLASS protected def isLocal(sym: Symbol)(using Context): Boolean = isUnpickleRoot(sym.topLevelClass) - protected def isUnpickleRoot(sym: Symbol)(using Context): Boolean = { + protected def isUnpickleRoot(sym: Symbol)(using Context): Boolean = val d = sym.denot d == moduleRoot || d == moduleClassRoot || d == classRoot - } /** If entry at i is undefined, define it by performing * operation op with readIndex at start of i'th * entry. Restore readIndex afterwards. 
*/ - protected def at[T <: AnyRef](i: Int, op: () => T): T = { + protected def at[T <: AnyRef](i: Int, op: () => T): T = var r = entries(i) - if (r eq null) { + if (r eq null) r = atReadPos(index(i), op) assert(entries(i) eq null, entries(i)) entries(i) = r - } r.asInstanceOf[T] - } - protected def atReadPos[T](start: Int, op: () => T): T = { + protected def atReadPos[T](start: Int, op: () => T): T = val savedIndex = readIndex readIndex = start try op() finally readIndex = savedIndex - } /** Read a name */ - protected def readName()(using Context): Name = { + protected def readName()(using Context): Name = val tag = readByte() val len = readNat() - tag match { + tag match case TERMname => termName(bytes, readIndex, len) case TYPEname => typeName(bytes, readIndex, len) case _ => errorBadSignature("bad name tag: " + tag) - } - } protected def readTermName()(using Context): TermName = readName().toTermName protected def readTypeName()(using Context): TypeName = readName().toTypeName @@ -340,13 +305,13 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas protected def readSymbol()(using Context): Symbol = readDisambiguatedSymbol(alwaysTrue)() /** Read a symbol, with possible disambiguation */ - protected def readDisambiguatedSymbol(p: Symbol => Boolean)()(using Context): Symbol = { + protected def readDisambiguatedSymbol(p: Symbol => Boolean)()(using Context): Symbol = val start = indexCoord(readIndex) val tag = readByte() val end = readNat() + readIndex def atEnd = readIndex == end - def readExtSymbol(): Symbol = { + def readExtSymbol(): Symbol = val nameRef = readNameRef() var name = nameRef.decode @@ -361,26 +326,23 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas val owner = if (atEnd) loadingMirror.RootClass else readSymbolRef() - def adjust(denot: Denotation) = { + def adjust(denot: Denotation) = val denot1 = denot.disambiguate(p) val sym = denot1.symbol - if (denot.exists && !denot1.exists) { // 
!!!DEBUG + if (denot.exists && !denot1.exists) // !!!DEBUG val alts = denot.alternatives map (d => s"$d:${d.info}/${d.signature}") System.err.println(s"!!! disambiguation failure: $alts") val members = denot.alternatives.head.symbol.owner.info.decls.toList map (d => s"$d:${d.info}/${d.signature}") System.err.println(s"!!! all members: $members") - } if (tag == EXTref) sym else sym.moduleClass - } - def fromName(name: Name): Symbol = name.toTermName match { + def fromName(name: Name): Symbol = name.toTermName match case nme.ROOT => loadingMirror.RootClass case nme.ROOTPKG => loadingMirror.RootPackage case _ => def declIn(owner: Symbol) = adjust(owner.info.decl(name)) val sym = declIn(owner) if (sym.exists || owner.ne(defn.ObjectClass)) sym else declIn(defn.AnyClass) - } def slowSearch(name: Name): Symbol = owner.info.decls.find(_.name == name) @@ -393,11 +355,10 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas //if (owner.isOverloaded) // return NoSymbol - if (tag == EXTMODCLASSref) { + if (tag == EXTMODCLASSref) val module = owner.info.decl(name.toTermName).suchThat(_.is(Module)) module.info // force it, as completer does not yet point to module class. module.symbol.moduleClass - } /* was: val moduleVar = owner.info.decl(name.toTermName.moduleVarName).symbol @@ -409,15 +370,15 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas // println(s"read ext symbol $name from ${owner.denot.debugString} in ${classRoot.debugString}") // !!! DEBUG // (1) Try name. - fromName(name) orElse { + fromName(name) orElse: // (2) Try with expanded name. Can happen if references to private // symbols are read from outside: for instance when checking the children // of a class. See #1722. - fromName(name.toTermName.expandedName(owner)) orElse { + fromName(name.toTermName.expandedName(owner)) orElse: // (3) Try as a nested object symbol. 
- nestedObjectSymbol orElse { + nestedObjectSymbol orElse: // (4) Call the mirror's "missing" hook. - adjust(missingHook(owner, name)) orElse { + adjust(missingHook(owner, name)) orElse: // println(owner.info.decls.toList.map(_.debugString).mkString("\n ")) // !!! DEBUG // } // (5) Create a stub symbol to defer hard failure a little longer. @@ -428,17 +389,11 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas System.err.println(i"**** slow search found: ${slowSearch(name)}") if (ctx.settings.YdebugMissingRefs.value) Thread.dumpStack() newStubSymbol(owner, name, source) - } - } - } - } - } - tag match { + tag match case NONEsym => return NoSymbol case EXTref | EXTMODCLASSref => return readExtSymbol() case _ => - } // symbols that were pickled with Pickler.writeSymInfo val nameref = readNat() @@ -460,18 +415,16 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas name = if (name == nme.TRAIT_CONSTRUCTOR) nme.CONSTRUCTOR else name.asTermName.unmangle(Scala2MethodNameKinds) - if ((flags.is(Scala2ExpandedName))) { + if ((flags.is(Scala2ExpandedName))) name = name.unmangle(ExpandedName) flags = flags &~ Scala2ExpandedName - } - if (flags.is(Scala2SuperAccessor)) { + if (flags.is(Scala2SuperAccessor)) /* Scala 2 super accessors are pickled as private, but are compiled as public expanded. * Dotty super accessors, however, are already pickled as public expanded. * We bridge the gap right now. */ name = name.asTermName.unmangle(SuperAccessorName).expandedName(owner) flags = flags &~ (Scala2SuperAccessor | Private) - } name = name.mapLast(_.decode) def nameMatches(rootName: Name) = name == rootName @@ -483,7 +436,7 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas //if (isModuleRoot) println(s"moduleRoot of $moduleRoot found at $readIndex, flags = ${flags.flagsString}") // !!! 
DEBUG //if (isModuleClassRoot) println(s"moduleClassRoot of $moduleClassRoot found at $readIndex, flags = ${flags.flagsString}") // !!! DEBUG - def completeRoot(denot: ClassDenotation, completer: LazyType, privateWithin: Symbol): Symbol = { + def completeRoot(denot: ClassDenotation, completer: LazyType, privateWithin: Symbol): Symbol = denot.setFlag(flags) denot.resetFlag(Touched) // allow one more completion @@ -499,12 +452,11 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas denot.setPrivateWithin(privateWithin) denot.info = completer denot.symbol - } - def finishSym(sym: Symbol): Symbol = { + def finishSym(sym: Symbol): Symbol = if (sym.isClass) sym.setFlag(Scala2x) - if (!(isRefinementClass(sym) || isUnpickleRoot(sym) || sym.is(Scala2Existential))) { + if (!(isRefinementClass(sym) || isUnpickleRoot(sym) || sym.is(Scala2Existential))) val owner = sym.owner val canEnter = owner.isClass && @@ -528,19 +480,15 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas if (canEnter) owner.asClass.enter(sym, symScope(owner)) - } sym - } - val (privateWithin, infoRef) = { + val (privateWithin, infoRef) = val ref = readNat() if (!isSymbolRef(ref)) (NoSymbol, ref) - else { + else val pw = at(ref, () => readSymbol()) (pw, readNat()) - } - } finishSym(tag match { case TYPEsym | ALIASsym => @@ -555,34 +503,31 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas else if (isModuleClassRoot) completeRoot( moduleClassRoot, rootClassUnpickler(start, moduleClassRoot.symbol, moduleClassRoot.sourceModule, infoRef), privateWithin) - else { - def completer(cls: Symbol) = { + else + def completer(cls: Symbol) = val unpickler = new ClassUnpickler(infoRef) withDecls symScope(cls) if (flags.is(ModuleClass)) unpickler.withSourceModule( cls.owner.info.decls.lookup(cls.name.sourceModuleName) .suchThat(_.is(Module)).symbol) else unpickler - } newClassSymbol(owner, name.asTypeName, flags, completer, 
privateWithin, coord = start) - } case VALsym => newSymbol(owner, name.asTermName, flags, localMemberUnpickler, privateWithin, coord = start) case MODULEsym => - if (isModuleRoot) { + if (isModuleRoot) moduleRoot setFlag flags moduleRoot.symbol - } else newSymbol(owner, name.asTermName, flags, - new LocalUnpickler().withModuleClass( + else newSymbol(owner, name.asTermName, flags, + new LocalUnpickler().withModuleClass( owner.info.decls.lookup(name.moduleClassName) .suchThat(_.is(Module)).symbol) , privateWithin, coord = start) case _ => errorBadSignature("bad symbol tag: " + tag) }) - } - class LocalUnpickler extends LazyType { + class LocalUnpickler extends LazyType: def startCoord(denot: SymDenotation): Coord = denot.symbol.coord def paramssOfType(tp: Type): List[List[Symbol]] = tp match @@ -593,8 +538,8 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas if params == null then rest else params :: rest case _ => Nil - def complete(denot: SymDenotation)(using Context): Unit = try { - def parseToCompletion(denot: SymDenotation)(using Context) = { + def complete(denot: SymDenotation)(using Context): Unit = try + def parseToCompletion(denot: SymDenotation)(using Context) = val tag = readByte() val end = readNat() + readIndex def atEnd = readIndex == end @@ -613,7 +558,7 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas params = denot.owner.typeParams :: params denot.rawParamss = params - denot match { + denot match case denot: ClassDenotation if !isRefinementClass(denot.symbol) => val selfInfo = if (atEnd) NoType else readTypeRef() setClassInfo(denot, tp, fromScala2 = true, selfInfo) @@ -631,44 +576,35 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas addConstructorTypeParams(denot) if (atEnd) assert(!denot.symbol.isSuperAccessor, denot) - else { + else assert(denot.is(ParamAccessor) || denot.symbol.isSuperAccessor, denot) def disambiguate(alt: Symbol) = // !!! 
DEBUG - trace.onDebug(s"disambiguating ${denot.info} =:= ${denot.owner.thisType.memberInfo(alt)} ${denot.owner}") { + trace.onDebug(s"disambiguating ${denot.info} =:= ${denot.owner.thisType.memberInfo(alt)} ${denot.owner}"): denot.info matches denot.owner.thisType.memberInfo(alt) - } val alias = readDisambiguatedSymbolRef(disambiguate).asTerm if alias.name == denot.name then denot.setFlag(SuperParamAlias) - } - } // println(s"unpickled ${denot.debugString}, info = ${denot.info}") !!! DEBUG - } atReadPos(startCoord(denot).toIndex, () => withMode(Mode.Scala2Unpickling) { - atPhaseBeforeTransforms { + atPhaseBeforeTransforms: parseToCompletion(denot) - } }) - } - catch { + catch case ex: RuntimeException => handleRuntimeException(ex) - } - } object localMemberUnpickler extends LocalUnpickler - class ClassUnpickler(infoRef: Int) extends LocalUnpickler with TypeParamsCompleter { + class ClassUnpickler(infoRef: Int) extends LocalUnpickler with TypeParamsCompleter: private var myTypeParams: List[TypeSymbol] = null - private def readTypeParams()(using Context): Unit = { + private def readTypeParams()(using Context): Unit = val tag = readByte() val end = readNat() + readIndex myTypeParams = - if (tag == POLYtpe) { + if (tag == POLYtpe) val unusedRestpeRef = readNat() until(end, () => readSymbolRef()(using ctx)).asInstanceOf[List[TypeSymbol]] - } else Nil - } + else Nil private def loadTypeParams()(using Context) = atReadPos(index(infoRef), () => readTypeParams()(using ctx)) @@ -682,7 +618,6 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas override def completerTypeParams(sym: Symbol)(using Context): List[TypeSymbol] = init() - } def rootClassUnpickler(start: Coord, cls: Symbol, module: Symbol, infoRef: Int): ClassUnpickler = (new ClassUnpickler(infoRef) with SymbolLoaders.SecondCompleter { @@ -698,17 +633,16 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas * to * tp { name: T } */ - def 
elimExistentials(boundSyms: List[Symbol], tp: Type)(using Context): Type = { + def elimExistentials(boundSyms: List[Symbol], tp: Type)(using Context): Type = // Need to be careful not to run into cyclic references here (observed when // compiling t247.scala). That's why we avoid taking `symbol` of a TypeRef // unless names match up. val isBound = { (tp: Type) => - def refersTo(tp: Type, sym: Symbol): Boolean = tp match { + def refersTo(tp: Type, sym: Symbol): Boolean = tp match case tp: TypeRef => sym.name == tp.name && sym == tp.symbol case tp: TypeVar => refersTo(tp.underlying, sym) case tp : LazyRef => refersTo(tp.ref, sym) case _ => false - } boundSyms.exists(refersTo(tp, _)) } // Cannot use standard `existsPart` method because it calls `lookupRefined` @@ -720,14 +654,13 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas def removeSingleton(tp: Type): Type = if (tp isRef defn.SingletonClass) defn.AnyType else tp - def mapArg(arg: Type) = arg match { + def mapArg(arg: Type) = arg match case arg: TypeRef if isBound(arg) => arg.symbol.info case _ => arg - } - def elim(tp: Type): Type = tp match { + def elim(tp: Type): Type = tp match case tp @ RefinedType(parent, name, rinfo) => val parent1 = elim(tp.parent) - rinfo match { + rinfo match case TypeAlias(info: TypeRef) if isBound(info) => RefinedType(parent1, name, info.symbol.info) case info: TypeRef if isBound(info) => @@ -736,7 +669,6 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas RefinedType(parent1, name, info1.mapReduceAnd(removeSingleton)(_ & _)) case info => tp.derivedRefinedType(parent1, name, info) - } case tp @ AppliedType(tycon, args) => val tycon1 = tycon.safeDealias if (tycon1 ne tycon) elim(tycon1.appliedTo(args)) @@ -746,26 +678,22 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas tp.derivedAndOrType(mapArg(tp.tp1).bounds.hi, mapArg(tp.tp2).bounds.hi) case _ => tp - } val tp1 = elim(tp) - if 
(isBoundAccumulator(false, tp1)) { + if (isBoundAccumulator(false, tp1)) val anyTypes = boundSyms map (_ => defn.AnyType) val boundBounds = boundSyms map (_.info.bounds.hi) val tp2 = tp1.subst(boundSyms, boundBounds).subst(boundSyms, anyTypes) report.warning(FailureToEliminateExistential(tp, tp1, tp2, boundSyms, classRoot.symbol), NoSourcePosition) tp2 - } else tp1 - } /** Read type ref, mapping a TypeRef to a package to the package's ThisType * Package references should be TermRefs or ThisTypes but it was observed that * nsc sometimes pickles them as TypeRefs instead. */ - private def readPrefix()(using Context): Type = readTypeRef() match { + private def readPrefix()(using Context): Type = readTypeRef() match case pre: TypeRef if pre.symbol.is(Package) => pre.symbol.thisType case pre => pre - } /** Read a type * @@ -773,7 +701,7 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas * the flag say that a type of kind * is expected, so that PolyType(tps, restpe) can be disambiguated to PolyType(tps, NullaryMethodType(restpe)) * (if restpe is not a ClassInfoType, a MethodType or a NullaryMethodType, which leaves TypeRef/SingletonType -- the latter would make the polytype a type constructor) */ - protected def readType()(using Context): Type = { + protected def readType()(using Context): Type = def select(pre: Type, sym: Symbol): Type = // structural members need to be selected by name, their symbols are only // valid in the synthetic refinement class that defines them. 
@@ -781,7 +709,7 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas val tag = readByte() val end = readNat() + readIndex - (tag: @switch) match { + (tag: @switch) match case NOtpe => NoType case NOPREFIXtpe => @@ -803,34 +731,30 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas case TYPEREFtpe => var pre = readPrefix() val sym = readSymbolRef() - pre match { + pre match case thispre: ThisType => // The problem is that class references super.C get pickled as // this.C. Dereferencing the member might then get an overriding class // instance. The problem arises for instance for LinkedHashMap#MapValues // and also for the inner Transform class in all views. We fix it by // replacing the this with the appropriate super. - if (sym.owner != thispre.cls) { + if (sym.owner != thispre.cls) val overriding = thispre.cls.info.decls.lookup(sym.name) - if (overriding.exists && overriding != sym) { + if (overriding.exists && overriding != sym) val base = pre.baseType(sym.owner) assert(base.exists) pre = SuperType(thispre, base) - } - } case NoPrefix if sym.is(TypeParam) => pre = sym.owner.thisType case _ => - } val tycon = select(pre, sym) val args = until(end, () => readTypeRef()) if (sym == defn.ByNameParamClass2x) ExprType(args.head.adaptByNameArgUnderPureFuns) else if (ctx.settings.scalajs.value && args.length == 2 && - sym.owner == JSDefinitions.jsdefn.ScalaJSJSPackageClass && sym == JSDefinitions.jsdefn.PseudoUnionClass) { + sym.owner == JSDefinitions.jsdefn.ScalaJSJSPackageClass && sym == JSDefinitions.jsdefn.PseudoUnionClass) // Treat Scala.js pseudo-unions as real unions, this requires a // special-case in erasure, see TypeErasure#eraseInfo. 
OrType(args(0), args(1), soft = false) - } else if args.nonEmpty then tycon.safeAppliedTo(EtaExpandIfHK(sym.typeParams, args.map(translateTempPoly))) .adaptFunctionTypeUnderPureFuns @@ -847,12 +771,11 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas val parents = until(end, () => readTypeRef()) val parent = parents.reduceLeft(AndType(_, _)) if (decls.isEmpty) parent - else { + else def subst(info: Type, rt: RecType) = info.substThis(clazz.asClass, rt.recThis) def addRefinement(tp: Type, sym: Symbol) = RefinedType(tp, sym.name, sym.info) val refined = decls.toList.foldLeft(parent)(addRefinement) RecType.closeOver(rt => refined.substThis(clazz, rt.recThis)) - } case CLASSINFOtpe => val clazz = readSymbolRef() TempClassInfoType(until(end, () => readTypeRef()), symScope(clazz), clazz) @@ -885,27 +808,23 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas AnnotatedType.make(readTypeRef(), until(end, () => readAnnotationRef())) case _ => noSuchTypeTag(tag, end) - } - } - def readTypeParams()(using Context): List[Symbol] = { + def readTypeParams()(using Context): List[Symbol] = val tag = readByte() val end = readNat() + readIndex - if (tag == POLYtpe) { + if (tag == POLYtpe) val unusedRestperef = readNat() until(end, () => readSymbolRef()) - } else Nil - } def noSuchTypeTag(tag: Int, end: Int)(using Context): Type = errorBadSignature("bad type tag: " + tag) /** Read a constant */ - protected def readConstant()(using Context): Constant | TermRef = { + protected def readConstant()(using Context): Constant | TermRef = val tag = readByte().toInt val len = readNat() - (tag: @switch) match { + (tag: @switch) match case LITERALunit => Constant(()) case LITERALboolean => Constant(readLong(len) != 0L) case LITERALbyte => Constant(readLong(len).toByte) @@ -920,41 +839,35 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas case LITERALclass => Constant(readTypeRef()) case LITERALenum 
=> readSymbolRef().termRef case _ => noSuchConstantTag(tag, len) - } - } def noSuchConstantTag(tag: Int, len: Int)(using Context): Constant = errorBadSignature("bad constant tag: " + tag) /** Read children and store them into the corresponding symbol. */ - protected def readChildren()(using Context): Unit = { + protected def readChildren()(using Context): Unit = val tag = readByte() assert(tag == CHILDREN) val end = readNat() + readIndex val target = readSymbolRef() - while (readIndex != end) { + while (readIndex != end) val start = readIndex readNat() // skip reference for now target.addAnnotation( Annotation.Child.later(atReadPos(start, () => readSymbolRef()), NoSpan)) - } - } /* Read a reference to a pickled item */ - protected def readSymbolRef()(using Context): Symbol = { //OPT inlined from: at(readNat(), readSymbol) to save on closure creation + protected def readSymbolRef()(using Context): Symbol = //OPT inlined from: at(readNat(), readSymbol) to save on closure creation val i = readNat() var r = entries(i) - if (r eq null) { + if (r eq null) val savedIndex = readIndex readIndex = index(i) r = readSymbol() assert(entries(i) eq null, entries(i)) entries(i) = r readIndex = savedIndex - } r.asInstanceOf[Symbol] - } protected def readDisambiguatedSymbolRef(p: Symbol => Boolean)(using Context): Symbol = at(readNat(), () => readDisambiguatedSymbol(p)()) @@ -977,96 +890,85 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas protected def readAnnotArg(i: Int)(using Context): untpd.Tree = untpd.TypedSplice(bytes(index(i)) match case TREE => at(i, () => readTree()) case _ => at(i, () => - readConstant() match - case c: Constant => Literal(c) - case tp: TermRef => ref(tp) - ) + readConstant() match + case c: Constant => Literal(c) + case tp: TermRef => ref(tp) + ) ) /** Read a ClassfileAnnotArg (argument to a classfile annotation) */ - private def readArrayAnnotArg()(using Context): untpd.Tree = { + private def readArrayAnnotArg()(using 
Context): untpd.Tree = readByte() // skip the `annotargarray` tag val end = readNat() + readIndex // array elements are trees representing instances of scala.annotation.Annotation untpd.JavaSeqLiteral( until(end, () => readClassfileAnnotArg(readNat())), untpd.TypeTree()) - } - private def readAnnotInfoArg()(using Context): untpd.Tree = untpd.TypedSplice { + private def readAnnotInfoArg()(using Context): untpd.Tree = untpd.TypedSplice: readByte() // skip the `annotinfo` tag val end = readNat() + readIndex readAnnotationContents(end) - } - protected def readClassfileAnnotArg(i: Int)(using Context): untpd.Tree = bytes(index(i)) match { + protected def readClassfileAnnotArg(i: Int)(using Context): untpd.Tree = bytes(index(i)) match case ANNOTINFO => at(i, () => readAnnotInfoArg()) case ANNOTARGARRAY => at(i, () => readArrayAnnotArg()) case _ => readAnnotArg(i) - } /** Read an annotation's contents. Not to be called directly, use * readAnnotation, readSymbolAnnotation, or readAnnotInfoArg */ - protected def readAnnotationContents(end: Int)(using Context): Tree = { + protected def readAnnotationContents(end: Int)(using Context): Tree = val atp = readTypeRef() - val args = { + val args = val t = new ListBuffer[untpd.Tree] - while (readIndex != end) { + while (readIndex != end) val argref = readNat() - t += { - if (isNameEntry(argref)) { + t `+=`: + if (isNameEntry(argref)) val name = at(argref, () => readName()) val arg = readClassfileAnnotArg(readNat()) untpd.NamedArg(name.asTermName, arg) - } else readAnnotArg(argref) - } - } t.toList - } untpd.resolveConstructor(atp, args) - } /** Read an annotation and as a side effect store it into * the symbol it requests. Called at top-level, for all * (symbol, annotInfo) entries. 
*/ - protected def readSymbolAnnotation()(using Context): Unit = { + protected def readSymbolAnnotation()(using Context): Unit = val tag = readByte() if (tag != SYMANNOT) errorBadSignature("symbol annotation expected (" + tag + ")") val end = readNat() + readIndex val target = readSymbolRef() target.addAnnotation(deferredAnnot(end)) - } /** Read an annotation and return it. Used when unpickling * an ANNOTATED(WSELF)tpe or a NestedAnnotArg */ - protected def readAnnotation()(using Context): Annotation = { + protected def readAnnotation()(using Context): Annotation = val tag = readByte() if (tag != ANNOTINFO) errorBadSignature("annotation expected (" + tag + ")") val end = readNat() + readIndex deferredAnnot(end) - } /** A deferred annotation that can be completed by reading * the bytes between `readIndex` and `end`. */ - protected def deferredAnnot(end: Int)(using Context): Annotation = { + protected def deferredAnnot(end: Int)(using Context): Annotation = val start = readIndex val phase = ctx.phase Annotation.deferredSymAndTree( atReadPos(start, () => atPhase(phase)(readTypeRef().typeSymbol)))( atReadPos(start, () => atPhase(phase)(readAnnotationContents(end)))) - } /* Read an abstract syntax tree */ - protected def readTree()(using Context): Tree = { + protected def readTree()(using Context): Tree = val outerTag = readByte() if (outerTag != TREE) errorBadSignature("tree expected (" + outerTag + ")") @@ -1082,23 +984,21 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas var name: Name = null /** Read a Symbol, Modifiers, and a Name */ - def setSymModsName(): Unit = { + def setSymModsName(): Unit = symbol = readSymbolRef() mods = readModifiersRef(symbol.isType) name = readNameRef() - } /** Read a Symbol and a Name */ - def setSymName(): Unit = { + def setSymName(): Unit = symbol = readSymbolRef() name = readNameRef() - } /** Read a Symbol */ def setSym(): Unit = symbol = readSymbolRef() implicit val span: Span = NoSpan - tag match { 
+ tag match case EMPTYtree => EmptyTree @@ -1333,8 +1233,6 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas case _ => noSuchTreeTag(tag, end) - } - } def noSuchTreeTag(tag: Int, end: Int)(using Context): Nothing = errorBadSignature("unknown tree type (" + tag + ")") @@ -1342,7 +1240,7 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas def unimplementedTree(what: String)(using Context): Nothing = errorBadSignature(s"cannot read $what trees from Scala 2.x signatures") - def readModifiers(isType: Boolean)(using Context): Modifiers = { + def readModifiers(isType: Boolean)(using Context): Modifiers = val tag = readNat() if (tag != MODIFIERS) errorBadSignature("expected a modifiers tag (" + tag + ")") @@ -1353,36 +1251,29 @@ class Scala2Unpickler(bytes: Array[Byte], classRoot: ClassDenotation, moduleClas val flags = unpickleScalaFlags(pflags, isType) val privateWithin = readNameRef().asTypeName Modifiers(flags, privateWithin, Nil) - } protected def readTemplateRef()(using Context): Template = - readTreeRef() match { + readTreeRef() match case templ: Template => templ case other => errorBadSignature("expected a template (" + other + ")") - } protected def readCaseDefRef()(using Context): CaseDef = - readTreeRef() match { + readTreeRef() match case tree: CaseDef => tree case other => errorBadSignature("expected a case def (" + other + ")") - } protected def readValDefRef()(using Context): ValDef = - readTreeRef() match { + readTreeRef() match case tree: ValDef => tree case other => errorBadSignature("expected a ValDef (" + other + ")") - } protected def readIdentRef()(using Context): Ident = - readTreeRef() match { + readTreeRef() match case tree: Ident => tree case other => errorBadSignature("expected an Ident (" + other + ")") - } protected def readTypeDefRef()(using Context): TypeDef = - readTreeRef() match { + readTreeRef() match case tree: TypeDef => tree case other => errorBadSignature("expected an 
TypeDef (" + other + ")") - } -} diff --git a/compiler/src/dotty/tools/dotc/decompiler/DecompilationPrinter.scala b/compiler/src/dotty/tools/dotc/decompiler/DecompilationPrinter.scala index 85a56b9f1d15..7f7ea25ee3f1 100644 --- a/compiler/src/dotty/tools/dotc/decompiler/DecompilationPrinter.scala +++ b/compiler/src/dotty/tools/dotc/decompiler/DecompilationPrinter.scala @@ -18,31 +18,26 @@ import scala.quoted.runtime.impl.QuotesImpl * * @author Nicolas Stucki */ -class DecompilationPrinter extends Phase { +class DecompilationPrinter extends Phase: override def phaseName: String = "decompilationPrinter" override def run(using Context): Unit = if (ctx.settings.outputDir.isDefault) printToOutput(System.out) - else { + else val outputDir = ctx.settings.outputDir.value var os: OutputStream = null var ps: PrintStream = null - try { + try os = File(outputDir.fileNamed("decompiled.scala").path)(Codec.UTF8).outputStream(append = true) ps = new PrintStream(os, /* autoFlush = */ false, StandardCharsets.UTF_8.name) printToOutput(ps) - } - finally { + finally if (os ne null) os.close() if (ps ne null) ps.close() - } - } - private def printToOutput(out: PrintStream)(using Context): Unit = { + private def printToOutput(out: PrintStream)(using Context): Unit = val unit = ctx.compilationUnit val unitFile = unit.source.toString.replace("\\", "/").replace(".class", ".tasty") out.println(s"/** Decompiled from $unitFile */") out.println(QuotesImpl.showDecompiledTree(unit.tpdTree)) - } -} diff --git a/compiler/src/dotty/tools/dotc/decompiler/IDEDecompilerDriver.scala b/compiler/src/dotty/tools/dotc/decompiler/IDEDecompilerDriver.scala index c148ff5f9bca..9feee94f2a51 100644 --- a/compiler/src/dotty/tools/dotc/decompiler/IDEDecompilerDriver.scala +++ b/compiler/src/dotty/tools/dotc/decompiler/IDEDecompilerDriver.scala @@ -15,9 +15,9 @@ import scala.quoted.runtime.impl.QuotesImpl /** * Decompiler to be used with IDEs */ -class IDEDecompilerDriver(val settings: List[String]) extends 
dotc.Driver { +class IDEDecompilerDriver(val settings: List[String]) extends dotc.Driver: - private val myInitCtx: Context = { + private val myInitCtx: Context = val rootCtx = initCtx.fresh.addMode(Mode.Interactive | Mode.ReadPositions) rootCtx.setSetting(rootCtx.settings.YreadComments, true) rootCtx.setSetting(rootCtx.settings.YretainTrees, true) @@ -25,16 +25,15 @@ class IDEDecompilerDriver(val settings: List[String]) extends dotc.Driver { val ctx = setup(settings.toArray :+ "dummy.scala", rootCtx).get._2 ctx.initialize()(using ctx) ctx - } private val decompiler = new PartialTASTYDecompiler - def run(tastyFile: AbstractFile): (String, String) = { + def run(tastyFile: AbstractFile): (String, String) = val reporter = new StoreReporter(null) with HideNonSensicalMessages val run = decompiler.newRun(using myInitCtx.fresh.setReporter(reporter)) - inContext(run.runContext) { + inContext(run.runContext): run.compile(List(tastyFile)) run.printSummary() val unit = ctx.run.nn.units.head @@ -44,6 +43,3 @@ class IDEDecompilerDriver(val settings: List[String]) extends dotc.Driver { reporter.removeBufferedMessages.foreach(message => System.err.println(message)) (tree, decompiled) - } - } -} diff --git a/compiler/src/dotty/tools/dotc/decompiler/Main.scala b/compiler/src/dotty/tools/dotc/decompiler/Main.scala index 3cc94f782793..7f7bd5857799 100644 --- a/compiler/src/dotty/tools/dotc/decompiler/Main.scala +++ b/compiler/src/dotty/tools/dotc/decompiler/Main.scala @@ -10,18 +10,15 @@ import dotty.tools.io.AbstractFile * * @author Nicolas Stucki */ -object Main extends dotc.Driver { - override protected def newCompiler(using Context): dotc.Compiler = { +object Main extends dotc.Driver: + override protected def newCompiler(using Context): dotc.Compiler = assert(ctx.settings.fromTasty.value) if (!ctx.settings.outputDir.isDefault) Files.deleteIfExists(ctx.settings.outputDir.value.fileNamed("decompiled.scala").jpath) new TASTYDecompiler - } - override def setup(args0: Array[String], 
rootCtx: Context): Option[(List[AbstractFile], Context)] = { + override def setup(args0: Array[String], rootCtx: Context): Option[(List[AbstractFile], Context)] = var args = args0.filter(a => a != "-decompile") if (!args.contains("-from-tasty")) args = "-from-tasty" +: args if (args.contains("-d")) args = "-color:never" +: args super.setup(args, rootCtx) - } -} diff --git a/compiler/src/dotty/tools/dotc/decompiler/PartialTASTYDecompiler.scala b/compiler/src/dotty/tools/dotc/decompiler/PartialTASTYDecompiler.scala index 62bf158d0ef6..ef527f654f35 100644 --- a/compiler/src/dotty/tools/dotc/decompiler/PartialTASTYDecompiler.scala +++ b/compiler/src/dotty/tools/dotc/decompiler/PartialTASTYDecompiler.scala @@ -6,6 +6,5 @@ import dotty.tools.dotc.core.Phases.Phase * allowing to control decompiler output by manually running it * on the CompilationUnits */ -class PartialTASTYDecompiler extends TASTYDecompiler { +class PartialTASTYDecompiler extends TASTYDecompiler: override protected def backendPhases: List[List[Phase]] = Nil -} diff --git a/compiler/src/dotty/tools/dotc/decompiler/TASTYDecompiler.scala b/compiler/src/dotty/tools/dotc/decompiler/TASTYDecompiler.scala index 62f3e75d2001..aa6053c71a68 100644 --- a/compiler/src/dotty/tools/dotc/decompiler/TASTYDecompiler.scala +++ b/compiler/src/dotty/tools/dotc/decompiler/TASTYDecompiler.scala @@ -8,7 +8,7 @@ import dotty.tools.dotc.core.Phases.Phase * * @author Nicolas Stucki */ -class TASTYDecompiler extends TASTYCompiler { +class TASTYDecompiler extends TASTYCompiler: override protected def frontendPhases: List[List[Phase]] = List(new ReadTasty) :: // Load trees from TASTY files @@ -20,4 +20,3 @@ class TASTYDecompiler extends TASTYCompiler { override protected def backendPhases: List[List[Phase]] = List(new DecompilationPrinter) :: // Print all loaded classes Nil -} diff --git a/compiler/src/dotty/tools/dotc/fromtasty/Debug.scala b/compiler/src/dotty/tools/dotc/fromtasty/Debug.scala index 979fae239e59..287de6910eb3 100644 
--- a/compiler/src/dotty/tools/dotc/fromtasty/Debug.scala +++ b/compiler/src/dotty/tools/dotc/fromtasty/Debug.scala @@ -11,8 +11,8 @@ import dotty.tools.io.Directory import java.io.{File => JFile} import java.nio.file.{Files, Paths} -object Debug { - def main(args: Array[String]): Unit = { +object Debug: + def main(args: Array[String]): Unit = // Preload scala.util.control.NonFatal. Otherwise, when trying to catch a StackOverflowError, // we may try to load it but fail with another StackOverflowError and lose the original exception, // see . @@ -30,11 +30,10 @@ object Debug { println("Compiling from .scala sources") val compilation1 = dotc.Main.process("-d" +: fromSourcesOut.toString +: args) - if (compilation1.hasErrors) { + if (compilation1.hasErrors) println("Failed compilation from sources") Directory(tmpOut).deleteRecursively() sys.exit(1) - } val fromTastyOut = Files.createDirectory(tmpOut.resolve("from-tasty")) @@ -53,22 +52,18 @@ object Debug { println("Compiling from .tasty sources") val compilation2 = dotc.Main.process(fromTastyArgs.toArray) - if (compilation2.hasErrors) { + if (compilation2.hasErrors) println("Failed compilation from TASTY") println("Compilation input: " + fromSourcesOut) // In this case we do not delete the generated class files to allow further debugging. // For example `dotc -decompile` on one of the intermediate class files. 
sys.exit(1) - } else { + else println("Recompilation successful") - } Directory(tmpOut).deleteRecursively() - } - private def insertClasspathInArgs(args: List[String], cp: String): List[String] = { + private def insertClasspathInArgs(args: List[String], cp: String): List[String] = val (beforeCp, fromCp) = args.span(_ != "-classpath") val classpath = fromCp.drop(1).headOption.fold(cp)(_ + JFile.pathSeparator + cp) "-classpath" :: classpath :: beforeCp ::: fromCp.drop(2) - } -} diff --git a/compiler/src/dotty/tools/dotc/fromtasty/ReadTasty.scala b/compiler/src/dotty/tools/dotc/fromtasty/ReadTasty.scala index 86ae99b3e0f9..d2b530ca17e6 100644 --- a/compiler/src/dotty/tools/dotc/fromtasty/ReadTasty.scala +++ b/compiler/src/dotty/tools/dotc/fromtasty/ReadTasty.scala @@ -14,7 +14,7 @@ import Phases.Phase /** Load trees from TASTY files */ -class ReadTasty extends Phase { +class ReadTasty extends Phase: def phaseName: String = "readTasty" @@ -24,61 +24,51 @@ class ReadTasty extends Phase { override def runOn(units: List[CompilationUnit])(using Context): List[CompilationUnit] = withMode(Mode.ReadPositions)(units.flatMap(readTASTY(_))) - def readTASTY(unit: CompilationUnit)(using Context): Option[CompilationUnit] = unit match { + def readTASTY(unit: CompilationUnit)(using Context): Option[CompilationUnit] = unit match case unit: TASTYCompilationUnit => val className = unit.className.toTypeName - def cannotUnpickle(reason: String): None.type = { + def cannotUnpickle(reason: String): None.type = report.error(em"class $className cannot be unpickled because $reason") None - } - def compilationUnit(cls: Symbol): Option[CompilationUnit] = cls match { + def compilationUnit(cls: Symbol): Option[CompilationUnit] = cls match case cls: ClassSymbol => - (cls.rootTreeOrProvider: @unchecked) match { + (cls.rootTreeOrProvider: @unchecked) match case unpickler: tasty.DottyUnpickler => if (cls.rootTree.isEmpty) None - else { + else val unit = CompilationUnit(cls, cls.rootTree, forceTrees = 
true) unit.pickled += (cls -> (() => unpickler.unpickler.bytes)) Some(unit) - } case tree: Tree[?] => // TODO handle correctly this case correctly to get the tree or avoid it completely. - cls.denot.infoOrCompleter match { + cls.denot.infoOrCompleter match case _ => Some(AlreadyLoadedCompilationUnit(cls.denot.fullName.toString)) - } case _ => cannotUnpickle(s"its class file does not have a TASTY attribute") - } case _ => None - } // The TASTY section in a/b/C.class may either contain a class a.b.C, an object a.b.C, or both. // We first try to load the class and fallback to loading the object if the class doesn't exist. // Note that if both the class and the object are present, then loading the class will also load // the object, this is why we use orElse here, otherwise we could load the object twice and // create ambiguities! - staticRef(className) match { + staticRef(className) match case clsd: ClassDenotation => - clsd.infoOrCompleter match { + clsd.infoOrCompleter match case info: ClassfileLoader => info.load(clsd) // sets cls.rootTreeOrProvider and cls.moduleClass.treeProvider as a side-effect case _ => - } def moduleClass = clsd.owner.info.member(className.moduleClassName).symbol compilationUnit(clsd.classSymbol).orElse(compilationUnit(moduleClass)) case _ => - staticRef(className.moduleClassName) match { + staticRef(className.moduleClassName) match case clsd: ClassDenotation => compilationUnit(clsd.classSymbol) case denot => cannotUnpickle(s"no class file was found for denot: $denot") - } - } case unit => Some(unit) - } def run(using Context): Unit = unsupported("run") -} diff --git a/compiler/src/dotty/tools/dotc/fromtasty/TASTYCompilationUnit.scala b/compiler/src/dotty/tools/dotc/fromtasty/TASTYCompilationUnit.scala index 77021efa3050..2ec4e2f25fdd 100644 --- a/compiler/src/dotty/tools/dotc/fromtasty/TASTYCompilationUnit.scala +++ b/compiler/src/dotty/tools/dotc/fromtasty/TASTYCompilationUnit.scala @@ -3,6 +3,5 @@ package dotty.tools.dotc.fromtasty import 
dotty.tools.dotc.CompilationUnit import dotty.tools.dotc.util.NoSource -class TASTYCompilationUnit(val className: String) extends CompilationUnit(NoSource) { +class TASTYCompilationUnit(val className: String) extends CompilationUnit(NoSource): override def toString: String = s"class file $className" -} diff --git a/compiler/src/dotty/tools/dotc/fromtasty/TASTYCompiler.scala b/compiler/src/dotty/tools/dotc/fromtasty/TASTYCompiler.scala index 923892b62f13..b927838059c6 100644 --- a/compiler/src/dotty/tools/dotc/fromtasty/TASTYCompiler.scala +++ b/compiler/src/dotty/tools/dotc/fromtasty/TASTYCompiler.scala @@ -6,13 +6,11 @@ import core._ import Contexts._ import Phases.Phase -class TASTYCompiler extends Compiler { +class TASTYCompiler extends Compiler: override protected def frontendPhases: List[List[Phase]] = List(new ReadTasty) :: Nil - override def newRun(using Context): Run = { + override def newRun(using Context): Run = reset() new TASTYRun(this, ctx.addMode(Mode.ReadPositions)) - } -} diff --git a/compiler/src/dotty/tools/dotc/fromtasty/TASTYRun.scala b/compiler/src/dotty/tools/dotc/fromtasty/TASTYRun.scala index fb0abe3332ed..f36574e8f436 100644 --- a/compiler/src/dotty/tools/dotc/fromtasty/TASTYRun.scala +++ b/compiler/src/dotty/tools/dotc/fromtasty/TASTYRun.scala @@ -9,11 +9,10 @@ import core.Contexts._ import core.Decorators.em import java.io.File -class TASTYRun(comp: Compiler, ictx: Context) extends Run(comp, ictx) { - override def compile(files: List[AbstractFile]): Unit = { +class TASTYRun(comp: Compiler, ictx: Context) extends Run(comp, ictx): + override def compile(files: List[AbstractFile]): Unit = val units = tastyUnits(files) compileUnits(units) - } private def tastyUnits(files: List[AbstractFile]): List[TASTYCompilationUnit] = val fromTastyIgnoreList = ctx.settings.YfromTastyIgnoreList.value.toSet @@ -32,4 +31,3 @@ class TASTYRun(comp: Compiler, ictx: Context) extends Run(comp, ictx) { Nil } classNames.map(new TASTYCompilationUnit(_)) -} diff --git 
a/compiler/src/dotty/tools/dotc/fromtasty/TastyFileUtil.scala b/compiler/src/dotty/tools/dotc/fromtasty/TastyFileUtil.scala index bc04cc648a65..9e8a21401f43 100644 --- a/compiler/src/dotty/tools/dotc/fromtasty/TastyFileUtil.scala +++ b/compiler/src/dotty/tools/dotc/fromtasty/TastyFileUtil.scala @@ -7,7 +7,7 @@ import dotty.tools.dotc.core.tasty.TastyClassName import dotty.tools.dotc.core.StdNames.nme.EMPTY_PACKAGE import dotty.tools.io.AbstractFile -object TastyFileUtil { +object TastyFileUtil: /** Get the class path of a tasty file * * If @@ -32,19 +32,16 @@ object TastyFileUtil { * ``` * then `getClassName("./out/foo/Foo.tasty") returns `Some("foo.Foo")` */ - def getClassName(file: AbstractFile): Option[String] = { + def getClassName(file: AbstractFile): Option[String] = assert(file.exists) assert(file.extension == "tasty") val bytes = file.toByteArray val names = new TastyClassName(bytes).readName() names.map { case (packageName, className) => - val fullName = packageName match { + val fullName = packageName match case EMPTY_PACKAGE => s"${className.lastPart}" case _ => s"$packageName.${className.lastPart}" - } fullName } - } -} diff --git a/compiler/src/dotty/tools/dotc/inlines/InlineReducer.scala b/compiler/src/dotty/tools/dotc/inlines/InlineReducer.scala index ebb76e9e9bf9..908904cc29ee 100644 --- a/compiler/src/dotty/tools/dotc/inlines/InlineReducer.scala +++ b/compiler/src/dotty/tools/dotc/inlines/InlineReducer.scala @@ -40,15 +40,14 @@ class InlineReducer(inliner: Inliner)(using Context): * - any bindings that wrap the instance creation * - whether the instance creation is precomputed or by-name */ - private object NewInstance { - def unapply(tree: Tree)(using Context): Option[(Symbol, List[Tree], List[Tree], Boolean)] = { + private object NewInstance: + def unapply(tree: Tree)(using Context): Option[(Symbol, List[Tree], List[Tree], Boolean)] = def unapplyLet(bindings: List[Tree], expr: Tree) = - unapply(expr) map { + unapply(expr) map: case (cls, reduced, 
prefix, precomputed) => (cls, reduced, bindings ::: prefix, precomputed) - } - tree match { + tree match case Apply(fn, args) => - fn match { + fn match case Select(New(tpt), nme.CONSTRUCTOR) => Some((tpt.tpe.classSymbol, args, Nil, false)) case TypeApply(Select(New(tpt), nme.CONSTRUCTOR), _) => @@ -60,7 +59,6 @@ class InlineReducer(inliner: Inliner)(using Context): meth.owner.linkedClass.is(Case)) Some(meth.owner.linkedClass, args, Nil, false) else None - } case Typed(inner, _) => // drop the ascribed tpt. We only need it if we can't find a NewInstance unapply(inner) @@ -74,30 +72,25 @@ class InlineReducer(inliner: Inliner)(using Context): unapplyLet(stats, expr) case _ => None - } - } - } /** If `tree` is equivalent to `new C(args).x` where class `C` does not have * initialization code and `x` is a parameter corresponding to one of the * arguments `args`, the corresponding argument, otherwise `tree` itself. * Side effects of original arguments need to be preserved. */ - def reduceProjection(tree: Tree)(using Context): Tree = { + def reduceProjection(tree: Tree)(using Context): Tree = if (ctx.debug) inlining.println(i"try reduce projection $tree") - tree match { + tree match case Select(NewInstance(cls, args, prefix, precomputed), field) if cls.isNoInitsRealClass => def matches(param: Symbol, selection: Symbol): Boolean = - param == selection || { - selection.name match { + param == selection `||`: + selection.name match case InlineAccessorName(underlying) => param.name == underlying && selection.info.isInstanceOf[ExprType] case _ => false - } - } val idx = cls.asClass.paramAccessors.indexWhere(matches(_, tree.symbol)) - if (idx >= 0 && idx < args.length) { + if (idx >= 0 && idx < args.length) def finish(arg: Tree) = new TreeTypeMap().transform(arg) // make sure local bindings in argument have fresh symbols .showing(i"projecting $tree -> $result", inlining) @@ -105,7 +98,7 @@ class InlineReducer(inliner: Inliner)(using Context): if (precomputed) if 
(isElideableExpr(arg)) finish(arg) else tree // nothing we can do here, projection would duplicate side effect - else { + else // newInstance is evaluated in place, need to reflect side effects of // arguments in the order they were written originally def collectImpure(from: Int, end: Int) = @@ -119,34 +112,27 @@ class InlineReducer(inliner: Inliner)(using Context): letBindUnless(TreeInfo.Pure, arg)(Block(trailing, _).withSpan(argsSpan)) val blockSpan = (prefix ::: leading).map(_.span).foldLeft(argInPlace.span)(_.union(_)) finish(seq(prefix, seq(leading, argInPlace)).withSpan(blockSpan)) - } - } else tree case Block(stats, expr) if stats.forall(isPureBinding) => cpy.Block(tree)(stats, reduceProjection(expr)) case _ => tree - } - } /** If this is a value binding: * - reduce its rhs if it is a projection and adjust its type accordingly, * - record symbol -> rhs in the InlineBindings context propery. */ - def normalizeBinding(binding: ValOrDefDef)(using Context) = { - val binding1 = binding match { + def normalizeBinding(binding: ValOrDefDef)(using Context) = + val binding1 = binding match case binding: ValDef => val rhs1 = reduceProjection(binding.rhs) binding.symbol.defTree = rhs1 if (rhs1 `eq` binding.rhs) binding - else { + else binding.symbol.info = rhs1.tpe cpy.ValDef(binding)(tpt = TypeTree(rhs1.tpe), rhs = rhs1) - } case _ => binding - } binding1.withSpan(call.span) - } /** The result type of reducing a match. It consists optionally of a list of bindings * for the pattern-bound variables and the RHS of the selected case. @@ -169,7 +155,7 @@ class InlineReducer(inliner: Inliner)(using Context): * @return optionally, if match can be reduced to a matching case: A pair of * bindings for all pattern-bound variables and the RHS of the case. 
*/ - def reduceInlineMatch(scrutinee: Tree, scrutType: Type, cases: List[CaseDef], typer: Typer)(using Context): MatchRedux = { + def reduceInlineMatch(scrutinee: Tree, scrutType: Type, cases: List[CaseDef], typer: Typer)(using Context): MatchRedux = val isImplicit = scrutinee.isEmpty @@ -180,27 +166,25 @@ class InlineReducer(inliner: Inliner)(using Context): caseBindingMap: mutable.ListBuffer[(Symbol, MemberDef)], scrut: TermRef, pat: Tree - )(using Context): Boolean = { + )(using Context): Boolean = /** Create a binding of a pattern bound variable with matching part of * scrutinee as RHS and type that corresponds to RHS. */ - def newTermBinding(sym: TermSymbol, rhs: Tree): Unit = { + def newTermBinding(sym: TermSymbol, rhs: Tree): Unit = val copied = sym.copy(info = rhs.tpe.widenInlineScrutinee, coord = sym.coord, flags = sym.flags &~ Case).asTerm caseBindingMap += ((sym, ValDef(copied, constToLiteral(rhs)).withSpan(sym.span))) - } - def newTypeBinding(sym: TypeSymbol, alias: Type): Unit = { + def newTypeBinding(sym: TypeSymbol, alias: Type): Unit = val copied = sym.copy(info = TypeAlias(alias), coord = sym.coord).asType caseBindingMap += ((sym, TypeDef(copied))) - } - def searchImplicit(sym: TermSymbol, tpt: Tree) = { + def searchImplicit(sym: TermSymbol, tpt: Tree) = val evTyper = new Typer(ctx.nestingLevel + 1) val evCtx = ctx.fresh.setTyper(evTyper) - inContext(evCtx) { + inContext(evCtx): val evidence = evTyper.inferImplicitArg(tpt.tpe, tpt.span) - evidence.tpe match { + evidence.tpe match case fail: Implicits.AmbiguousImplicits => report.error(evTyper.missingArgMsg(evidence, tpt.tpe, ""), tpt.srcPos) true // hard error: return true to stop implicit search here @@ -210,23 +194,17 @@ class InlineReducer(inliner: Inliner)(using Context): //inlining.println(i"inferred implicit $sym: ${sym.info} with $evidence: ${evidence.tpe.widen}, ${evCtx.gadt.constraint}, ${evCtx.typerState.constraint}") newTermBinding(sym, evidence) true - } - } - } type TypeBindsMap = 
SimpleIdentityMap[TypeSymbol, java.lang.Boolean] - def getTypeBindsMap(pat: Tree, tpt: Tree): TypeBindsMap = { - val getBinds = new TreeAccumulator[Set[TypeSymbol]] { - def apply(syms: Set[TypeSymbol], t: Tree)(using Context): Set[TypeSymbol] = { - val syms1 = t match { + def getTypeBindsMap(pat: Tree, tpt: Tree): TypeBindsMap = + val getBinds = new TreeAccumulator[Set[TypeSymbol]]: + def apply(syms: Set[TypeSymbol], t: Tree)(using Context): Set[TypeSymbol] = + val syms1 = t match case t: Bind if t.symbol.isType => syms + t.symbol.asType case _ => syms - } foldOver(syms1, t) - } - } // Extractors can contain Bind nodes in type parameter lists, // for that case tree looks like this: @@ -239,34 +217,28 @@ class InlineReducer(inliner: Inliner)(using Context): // and the binds will be found in the type tree instead // Test case is pos-macros/i15971 val tptBinds = getBinds(Set.empty[TypeSymbol], tpt) - val binds: Set[TypeSymbol] = pat match { + val binds: Set[TypeSymbol] = pat match case UnApply(TypeApply(_, tpts), _, _) => getBinds(Set.empty[TypeSymbol], tpts) ++ tptBinds case _ => tptBinds - } - val extractBindVariance = new TypeAccumulator[TypeBindsMap] { - def apply(syms: TypeBindsMap, t: Type) = { - val syms1 = t match { + val extractBindVariance = new TypeAccumulator[TypeBindsMap]: + def apply(syms: TypeBindsMap, t: Type) = + val syms1 = t match // `binds` is used to check if the symbol was actually bound by the pattern we're processing case tr: TypeRef if tr.symbol.is(Case) && binds.contains(tr.symbol.asType) => val trSym = tr.symbol.asType // Exact same logic as in IsFullyDefinedAccumulator: // the binding is to be maximized iff it only occurs contravariantly in the type - val wasToBeMinimized: Boolean = { + val wasToBeMinimized: Boolean = val v = syms(trSym) if (v != null) v else false - } syms.updated(trSym, wasToBeMinimized || variance >= 0 : java.lang.Boolean) case _ => syms - } foldOver(syms1, t) - } - } extractBindVariance(SimpleIdentityMap.empty, tpt.tpe) 
- } def addTypeBindings(typeBinds: TypeBindsMap)(using Context): Unit = typeBinds.foreachBinding { case (sym, shouldBeMinimized) => @@ -277,43 +249,38 @@ class InlineReducer(inliner: Inliner)(using Context): def registerAsGadtSyms(typeBinds: TypeBindsMap)(using Context): Unit = if (typeBinds.size > 0) ctx.gadtState.addToConstraint(typeBinds.keys) - pat match { + pat match case Typed(pat1, tpt) => val typeBinds = getTypeBindsMap(pat1, tpt) registerAsGadtSyms(typeBinds) - scrut <:< tpt.tpe && { + scrut <:< tpt.tpe `&&`: addTypeBindings(typeBinds) reducePattern(caseBindingMap, scrut, pat1) - } case pat @ Bind(name: TermName, Typed(_, tpt)) if isImplicit => val typeBinds = getTypeBindsMap(tpt, tpt) registerAsGadtSyms(typeBinds) - searchImplicit(pat.symbol.asTerm, tpt) && { + searchImplicit(pat.symbol.asTerm, tpt) `&&`: addTypeBindings(typeBinds) true - } case pat @ Bind(name: TermName, body) => - reducePattern(caseBindingMap, scrut, body) && { + reducePattern(caseBindingMap, scrut, body) `&&`: if (name != nme.WILDCARD) newTermBinding(pat.symbol.asTerm, ref(scrut)) true - } case Ident(nme.WILDCARD) => true case pat: Literal => scrut.widenTermRefExpr =:= pat.tpe case pat: RefTree => scrut =:= pat.tpe || - scrut.classSymbol.is(Module) && scrut.widen =:= pat.tpe.widen && { - scrut.prefix match { + scrut.classSymbol.is(Module) && scrut.widen =:= pat.tpe.widen `&&`: + scrut.prefix match case _: SingletonType | NoPrefix => true case _ => false - } - } case UnApply(unapp, _, pats) => - unapp.tpe.widen match { + unapp.tpe.widen match case mt: MethodType if mt.paramInfos.length == 1 => - def reduceSubPatterns(pats: List[Tree], selectors: List[Tree]): Boolean = (pats, selectors) match { + def reduceSubPatterns(pats: List[Tree], selectors: List[Tree]): Boolean = (pats, selectors) match case (Nil, Nil) => true case (pat :: pats1, selector :: selectors1) => val elem = newSym(InlineBinderName.fresh(), Synthetic, selector.tpe.widenInlineScrutinee).asTerm @@ -323,11 +290,10 @@ class 
InlineReducer(inliner: Inliner)(using Context): reducePattern(caseBindingMap, elem.termRef, pat) && reduceSubPatterns(pats1, selectors1) case _ => false - } val paramType = mt.paramInfos.head val paramCls = paramType.classSymbol - if (paramCls.is(Case) && unapp.symbol.is(Synthetic) && scrut <:< paramType) { + if (paramCls.is(Case) && unapp.symbol.is(Synthetic) && scrut <:< paramType) val caseAccessors = if (paramCls.is(Scala2x)) paramCls.caseAccessors.filter(_.is(Method)) else paramCls.asClass.paramAccessors @@ -335,65 +301,55 @@ class InlineReducer(inliner: Inliner)(using Context): for (accessor <- caseAccessors) yield constToLiteral(reduceProjection(ref(scrut).select(accessor).ensureApplied)) caseAccessors.length == pats.length && reduceSubPatterns(pats, selectors) - } else false case _ => false - } case Alternative(pats) => pats.exists(reducePattern(caseBindingMap, scrut, _)) case Inlined(EmptyTree, Nil, ipat) => reducePattern(caseBindingMap, scrut, ipat) case _ => false - } - } /** The initial scrutinee binding: `val $scrutineeN = ` */ val scrutineeSym = newSym(InlineScrutineeName.fresh(), Synthetic, scrutType).asTerm val scrutineeBinding = normalizeBinding(ValDef(scrutineeSym, scrutinee)) - def reduceCase(cdef: CaseDef): MatchReduxWithGuard = { + def reduceCase(cdef: CaseDef): MatchReduxWithGuard = val caseBindingMap = new mutable.ListBuffer[(Symbol, MemberDef)]() def substBindings( bindings: List[(Symbol, MemberDef)], bbuf: mutable.ListBuffer[MemberDef], from: List[Symbol], to: List[Symbol]): (List[MemberDef], List[Symbol], List[Symbol]) = - bindings match { + bindings match case (sym, binding) :: rest => bbuf += binding.subst(from, to).asInstanceOf[MemberDef] if (sym.exists) substBindings(rest, bbuf, sym :: from, binding.symbol :: to) else substBindings(rest, bbuf, from, to) case Nil => (bbuf.toList, from, to) - } if (!isImplicit) caseBindingMap += ((NoSymbol, scrutineeBinding)) val gadtCtx = ctx.fresh.setFreshGADTBounds.addMode(Mode.GadtConstraintInference) 
- if (reducePattern(caseBindingMap, scrutineeSym.termRef, cdef.pat)(using gadtCtx)) { + if (reducePattern(caseBindingMap, scrutineeSym.termRef, cdef.pat)(using gadtCtx)) val (caseBindings, from, to) = substBindings(caseBindingMap.toList, mutable.ListBuffer(), Nil, Nil) val (guardOK, canReduceGuard) = if cdef.guard.isEmpty then (true, true) - else typer.typed(cdef.guard.subst(from, to), defn.BooleanType) match { + else typer.typed(cdef.guard.subst(from, to), defn.BooleanType) match case ConstantValue(v: Boolean) => (v, true) case _ => (false, false) - } if guardOK then Some((caseBindings.map(_.subst(from, to)), cdef.body.subst(from, to), canReduceGuard)) else if canReduceGuard then None else Some((caseBindings.map(_.subst(from, to)), cdef.body.subst(from, to), canReduceGuard)) - } else None - } - def recur(cases: List[CaseDef]): MatchRedux = cases match { + def recur(cases: List[CaseDef]): MatchRedux = cases match case Nil => None case cdef :: cases1 => reduceCase(cdef) match case None => recur(cases1) case r @ Some((caseBindings, rhs, canReduceGuard)) if canReduceGuard => Some((caseBindings, rhs)) case _ => None - } recur(cases) - } end InlineReducer diff --git a/compiler/src/dotty/tools/dotc/inlines/Inliner.scala b/compiler/src/dotty/tools/dotc/inlines/Inliner.scala index 73fa2a2871a2..ef9f625895a7 100644 --- a/compiler/src/dotty/tools/dotc/inlines/Inliner.scala +++ b/compiler/src/dotty/tools/dotc/inlines/Inliner.scala @@ -38,7 +38,7 @@ object Inliner: * of a containing object so they are merely idempotent. 
*/ object isElideableExpr: - def isStatElideable(tree: Tree)(using Context): Boolean = unsplice(tree) match { + def isStatElideable(tree: Tree)(using Context): Boolean = unsplice(tree) match case EmptyTree | TypeDef(_, _) | Import(_, _) @@ -48,9 +48,8 @@ object Inliner: if (vdef.symbol.flags is Mutable) false else apply(vdef.rhs) case _ => false - } - def apply(tree: Tree)(using Context): Boolean = unsplice(tree) match { + def apply(tree: Tree)(using Context): Boolean = unsplice(tree) match case EmptyTree | This(_) | Super(_, _) @@ -66,7 +65,7 @@ object Inliner: case TypeApply(fn, _) => if (fn.symbol.is(Erased) || fn.symbol == defn.QuotedTypeModule_of) true else apply(fn) case Apply(fn, args) => - val isCaseClassApply = { + val isCaseClassApply = val cls = tree.tpe.classSymbol val meth = fn.symbol meth.name == nme.apply && @@ -77,7 +76,6 @@ object Inliner: case Select(qual, _) => qual.symbol.is(Synthetic) // e.g: disallow `{ ..; Foo }.apply(..)` case meth @ Ident(_) => meth.symbol.is(Synthetic) // e.g: allow `import Foo.{ apply => foo }; foo(..)` case _ => false - } if isPureApply(tree, fn) then apply(fn) && args.forall(apply) else if (isCaseClassApply) @@ -94,7 +92,6 @@ object Inliner: apply(expr) case _ => false - } end isElideableExpr // InlineCopier is a more fault-tolerant copier that does not cause errors when @@ -205,7 +202,7 @@ class Inliner(val call: tpd.Tree)(using Context): * @param buf the buffer to which the definition should be appended */ private[inlines] def paramBindingDef(name: Name, formal: Type, arg0: Tree, - buf: DefBuffer)(using Context): ValOrDefDef = { + buf: DefBuffer)(using Context): ValOrDefDef = val isByName = formal.dealias.isInstanceOf[ExprType] val arg = def dropNameArg(arg: Tree): Tree = arg match @@ -243,7 +240,6 @@ class Inliner(val call: tpd.Tree)(using Context): inlining.println(i"parameter binding: $binding, $argIsBottom") buf += binding binding - } /** Populate `paramBinding` and `buf` by matching parameters with * 
corresponding arguments. `bindingbuf` will be further extended later by @@ -283,7 +279,7 @@ class Inliner(val call: tpd.Tree)(using Context): private def classNestingLevel(cls: Symbol) = cls.ownersIterator.count(_.isClass) // Compute val-definitions for all this-proxies and append them to `bindingsBuf` - private def computeThisBindings() = { + private def computeThisBindings() = // All needed this-proxies, paired-with and sorted-by nesting depth of // the classes they represent (innermost first) val sortedProxies = thisProxy.toList @@ -298,7 +294,7 @@ class Inliner(val call: tpd.Tree)(using Context): var lastSelf: Symbol = NoSymbol var lastCls: Symbol = NoSymbol var lastLevel: Int = 0 - for ((level, selfSym, cls) <- sortedProxies) { + for ((level, selfSym, cls) <- sortedProxies) val rhs = selfSym.info.dealias match case info: TermRef if info.isStable && (lastSelf.exists || isPureExpr(inlineCallPrefix)) => @@ -326,8 +322,6 @@ class Inliner(val call: tpd.Tree)(using Context): lastSelf = selfSym lastLevel = level lastCls = cls - } - } /** A list of pairs between TermRefs appearing in thisProxy bindings that * refer to objects with opaque type aliases and local proxy symbols @@ -340,15 +334,14 @@ class Inliner(val call: tpd.Tree)(using Context): /** Map first halfs of opaqueProxies pairs to second halfs, using =:= as equality */ private def mapRef(ref: TermRef): Option[TermRef] = - opaqueProxies.collectFirst { + opaqueProxies.collectFirst: case (from, to) if from.symbol == ref.symbol && from =:= ref => to - } /** If `tp` contains TermRefs that refer to objects with opaque * type aliases, add proxy definitions to `opaqueProxies` that expose these aliases. 
*/ private def addOpaqueProxies(tp: Type, span: Span, forThisProxy: Boolean)(using Context): Unit = - tp.foreachPart { + tp.foreachPart: case ref: TermRef => for cls <- ref.widen.baseClasses do if cls.containsOpaques @@ -374,7 +367,6 @@ class Inliner(val call: tpd.Tree)(using Context): bindingsBuf += refiningDef opaqueProxies += ((ref, refiningSym.termRef)) case _ => - } /** Map all TermRefs that match left element in `opaqueProxies` to the * corresponding right element. @@ -382,11 +374,10 @@ class Inliner(val call: tpd.Tree)(using Context): private val mapOpaques = TreeTypeMap( typeMap = new TypeMap: override def stopAt = StopAt.Package - def apply(t: Type) = mapOver { + def apply(t: Type) = mapOver: t match case ref: TermRef => mapRef(ref).getOrElse(ref) case _ => t - } ) /** If `binding` contains TermRefs that refer to objects with opaque @@ -450,13 +441,12 @@ class Inliner(val call: tpd.Tree)(using Context): * references of a method are (we only know the method's type, but that contains TypeParamRefs * and MethodParams, not TypeRefs or TermRefs. 
*/ - private def registerType(tpe: Type): Unit = tpe match { + private def registerType(tpe: Type): Unit = tpe match case tpe: ThisType if !canElideThis(tpe) && !thisProxy.contains(tpe.cls) => val proxyName = s"${tpe.cls.name}_this".toTermName - val proxyType = inlineCallPrefix.tpe.dealias.tryNormalize match { + val proxyType = inlineCallPrefix.tpe.dealias.tryNormalize match case typeMatchResult if typeMatchResult.exists => typeMatchResult case _ => adaptToPrefix(tpe).widenIfUnstable - } thisProxy(tpe.cls) = newSym(proxyName, InlineProxy, proxyType).termRef for (param <- tpe.cls.typeParams) paramProxy(param.typeRef) = adaptToPrefix(param.typeRef) @@ -475,7 +465,6 @@ class Inliner(val call: tpd.Tree)(using Context): // The widened type may contain param types too (see tests/pos/i12379a.scala) if tpe.isTerm then registerType(tpe.widenTermRefExpr) case _ => - } private val registerTypes = new TypeTraverser: override def stopAt = StopAt.Package @@ -498,10 +487,9 @@ class Inliner(val call: tpd.Tree)(using Context): tree.changeOwner(originalOwner, ctx.owner) def tryConstValue: Tree = - TypeComparer.constValue(callTypeArgs.head.tpe) match { + TypeComparer.constValue(callTypeArgs.head.tpe) match case Some(c) => Literal(c).withSpan(call.span) case _ => EmptyTree - } val reducer = new InlineReducer(this) @@ -563,27 +551,24 @@ class Inliner(val call: tpd.Tree)(using Context): new DeepTypeMap { override def stopAt = if opaqueProxies.isEmpty then StopAt.Static else StopAt.Package - def apply(t: Type) = t match { + def apply(t: Type) = t match case t: ThisType => thisProxy.getOrElse(t.cls, t) case t: TypeRef => paramProxy.getOrElse(t, mapOver(t)) case t: SingletonType => if t.termSymbol.isAllOf(InlineParam) then apply(t.widenTermRefExpr) else paramProxy.getOrElse(t, mapOver(t)) case t => mapOver(t) - } }, - treeMap = { + treeMap = { case tree: This => - tree.tpe match { + tree.tpe match case thistpe: ThisType => - thisProxy.get(thistpe.cls) match { + thisProxy.get(thistpe.cls) 
match case Some(t) => val thisRef = ref(t).withSpan(call.span) inlinedFromOutside(thisRef)(tree.span) case None => tree - } case _ => tree - } case tree: Ident => /* Span of the argument. Used when the argument is inlined directly without a binding */ def argSpan = @@ -591,14 +576,13 @@ class Inliner(val call: tpd.Tree)(using Context): else if (tree.symbol.isTypeParam && tree.symbol.owner.isClass) tree.span // TODO is this the correct span? else paramSpan(tree.name) val inlinedCtx = ctx.withSource(inlinedMethod.topLevelClass.source) - paramProxy.get(tree.tpe) match { + paramProxy.get(tree.tpe) match case Some(t) if tree.isTerm && t.isSingleton => val inlinedSingleton = singleton(t).withSpan(argSpan) inlinedFromOutside(inlinedSingleton)(tree.span) case Some(t) if tree.isType => inlinedFromOutside(TypeTree(t).withSpan(argSpan))(tree.span) case _ => tree - } case tree @ Select(qual: This, name) if tree.symbol.is(Private) && tree.symbol.isInlineMethod => // This inline method refers to another (private) inline method (see tests/pos/i14042.scala). // We insert upcast to access the private inline method once inlined. This makes the selection @@ -623,12 +607,11 @@ class Inliner(val call: tpd.Tree)(using Context): // make them part of `bindingsBuf`. The expansion is then the tree that remains. val expansion = inliner.transform(rhsToInline) - def issueError() = callValueArgss match { + def issueError() = callValueArgss match case (msgArg :: Nil) :: Nil => - val message = msgArg.tpe match { + val message = msgArg.tpe match case ConstantType(Constant(msg: String)) => msg.toMessage case _ => em"A literal string is expected as an argument to `compiletime.error`. Got $msgArg" - } // Usually `error` is called from within a rewrite method. In this // case we need to report the error at the point of the outermost enclosing inline // call. 
This way, a defensively written rewrite method can always @@ -637,11 +620,9 @@ class Inliner(val call: tpd.Tree)(using Context): val ctxToReport = ctx.outersIterator.dropWhile(enclosingInlineds(using _).nonEmpty).next // The context in which we report should still use the existing context reporter val ctxOrigReporter = ctxToReport.fresh.setReporter(ctx.reporter) - inContext(ctxOrigReporter) { + inContext(ctxOrigReporter): report.error(message, callToReport.srcPos) - } case _ => - } /** The number of nodes in this tree, excluding code in nested inline * calls and annotations of definitions. @@ -663,28 +644,26 @@ class Inliner(val call: tpd.Tree)(using Context): case _ => siz - trace(i"inlining $call", inlining, show = true) { + trace(i"inlining $call", inlining, show = true): // The normalized bindings collected in `bindingsBuf` bindingsBuf.mapInPlace { binding => // Set trees to symbols allow macros to see the definition tree. // This is used by `underlyingArgument`. val binding1 = reducer.normalizeBinding(binding)(using inlineCtx).setDefTree - binding1.foreachSubTree { + binding1.foreachSubTree: case tree: MemberDef => tree.setDefTree case _ => - } binding1 } // Run a typing pass over the inlined tree. See InlineTyper for details. val expansion1 = inlineTyper.typed(expansion)(using inlineCtx) - if (ctx.settings.verbose.value) { + if (ctx.settings.verbose.value) inlining.println(i"to inline = $rhsToInline") inlining.println(i"original bindings = ${bindingsBuf.toList}%\n%") inlining.println(i"original expansion = $expansion1") - } // Drop unused bindings val (finalBindings, finalExpansion) = dropUnusedDefs(bindingsBuf.toList, expansion1) @@ -694,34 +673,29 @@ class Inliner(val call: tpd.Tree)(using Context): addInlinedTrees(treeSize(finalExpansion)) (finalBindings, finalExpansion) - } end inlined /** An extractor for references to inlineable arguments. 
These are : * - by-value arguments marked with `inline` * - all by-name arguments */ - private object InlineableArg { + private object InlineableArg: lazy val paramProxies = paramProxy.values.toSet - def unapply(tree: Trees.Ident[?])(using Context): Option[Tree] = { + def unapply(tree: Trees.Ident[?])(using Context): Option[Tree] = def search(buf: DefBuffer) = buf.find(_.name == tree.name) if (paramProxies.contains(tree.typeOpt)) - search(bindingsBuf) match { + search(bindingsBuf) match case Some(bind: ValOrDefDef) if bind.symbol.is(Inline) => Some(integrate(bind.rhs, bind.symbol)) case _ => None - } else None - } - } - private[inlines] def tryInlineArg(tree: Tree)(using Context): Tree = tree match { + private[inlines] def tryInlineArg(tree: Tree)(using Context): Tree = tree match case InlineableArg(rhs) => inlining.println(i"inline arg $tree -> $rhs") rhs case _ => EmptyTree - } /** A typer for inlined bodies. Beyond standard typing, an inline typer performs * the following functions: @@ -736,18 +710,15 @@ class Inliner(val call: tpd.Tree)(using Context): extends ReTyper(nestingLevel): import reducer._ - override def ensureAccessible(tpe: Type, superAccess: Boolean, pos: SrcPos)(using Context): Type = { - tpe match { + override def ensureAccessible(tpe: Type, superAccess: Boolean, pos: SrcPos)(using Context): Type = + tpe match case tpe: NamedType if tpe.symbol.exists && !tpe.symbol.isAccessibleFrom(tpe.prefix, superAccess) => - tpe.info match { + tpe.info match case TypeAlias(alias) => return ensureAccessible(alias, superAccess, pos) case info: ConstantType if tpe.symbol.isStableMember => return info case _ => - } case _ => - } super.ensureAccessible(tpe, superAccess, pos) - } /** Enter implicits in scope so that they can be found in implicit search. 
* This is important for non-transparent inlines @@ -769,7 +740,7 @@ class Inliner(val call: tpd.Tree)(using Context): case _ => tree1 - override def typedSelect(tree: untpd.Select, pt: Type)(using Context): Tree = { + override def typedSelect(tree: untpd.Select, pt: Type)(using Context): Tree = val locked = ctx.typerState.ownedVars val qual1 = typed(tree.qualifier, shallowSelectionProto(tree.name, pt, this)) val resNoReduce = untpd.cpy.Select(tree)(qual1, tree.name).withType(tree.typeOpt) @@ -786,11 +757,10 @@ class Inliner(val call: tpd.Tree)(using Context): else ensureAccessible(res.tpe, tree.qualifier.isInstanceOf[untpd.Super], tree.srcPos) res - } override def typedIf(tree: untpd.If, pt: Type)(using Context): Tree = val condCtx = if tree.isInline then ctx.addMode(Mode.ForceInline) else ctx - typed(tree.cond, defn.BooleanType)(using condCtx) match { + typed(tree.cond, defn.BooleanType)(using condCtx) match case cond1 @ ConstantValue(b: Boolean) => val selected0 = if (b) tree.thenp else tree.elsep val selected = if (selected0.isEmpty) tpd.Literal(Constant(())) else typed(selected0, pt) @@ -804,7 +774,6 @@ class Inliner(val call: tpd.Tree)(using Context): cond1.computeNullableDeeply() val if1 = untpd.cpy.If(tree)(cond = untpd.TypedSplice(cond1)) super.typedIf(if1, pt) - } override def typedValDef(vdef: untpd.ValDef, sym: Symbol)(using Context): Tree = val vdef1 = @@ -853,30 +822,27 @@ class Inliner(val call: tpd.Tree)(using Context): override def typedMatchFinish(tree: untpd.Match, sel: Tree, wideSelType: Type, cases: List[untpd.CaseDef], pt: Type)(using Context) = if (!tree.isInline || ctx.owner.isInlineMethod) // don't reduce match of nested inline method yet super.typedMatchFinish(tree, sel, wideSelType, cases, pt) - else { - def selTyped(sel: Tree): Type = sel match { + else + def selTyped(sel: Tree): Type = sel match case Typed(sel2, _) => selTyped(sel2) case Block(Nil, sel2) => selTyped(sel2) case Inlined(_, Nil, sel2) => selTyped(sel2) case _ => sel.tpe - } 
val selType = if (sel.isEmpty) wideSelType else selTyped(sel) - reduceInlineMatch(sel, selType, cases.asInstanceOf[List[CaseDef]], this) match { + reduceInlineMatch(sel, selType, cases.asInstanceOf[List[CaseDef]], this) match case Some((caseBindings, rhs0)) => // drop type ascriptions/casts hiding pattern-bound types (which are now aliases after reducing the match) // note that any actually necessary casts will be reinserted by the typing pass below - val rhs1 = rhs0 match { + val rhs1 = rhs0 match case Block(stats, t) if t.span.isSynthetic => - t match { + t match case Typed(expr, _) => Block(stats, expr) case TypeApply(sel@Select(expr, _), _) if sel.symbol.isTypeCast => Block(stats, expr) case _ => rhs0 - } case _ => rhs0 - } val (usedBindings, rhs2) = dropUnusedDefs(caseBindings, rhs1) val rhs = seq(usedBindings, rhs2) inlining.println(i"""--- reduce: @@ -896,8 +862,6 @@ class Inliner(val call: tpd.Tree)(using Context): | scrutinee: $sel : ${selType} | patterns : ${tree.cases.map(patStr).mkString("\n ")}""" errorTree(tree, msg) - } - } override def newLikeThis(nestingLevel: Int): Typer = new InlineTyper(initialErrorCount, nestingLevel) @@ -934,19 +898,18 @@ class Inliner(val call: tpd.Tree)(using Context): /** Drop any side-effect-free bindings that are unused in expansion or other reachable bindings. * Inline def bindings that are used only once. 
*/ - private def dropUnusedDefs(bindings: List[MemberDef], tree: Tree)(using Context): (List[MemberDef], Tree) = { + private def dropUnusedDefs(bindings: List[MemberDef], tree: Tree)(using Context): (List[MemberDef], Tree) = // inlining.println(i"drop unused $bindings%, % in $tree") val (termBindings, typeBindings) = bindings.partition(_.symbol.isTerm) - if (typeBindings.nonEmpty) { + if (typeBindings.nonEmpty) val typeBindingsSet = typeBindings.foldLeft[SimpleIdentitySet[Symbol]](SimpleIdentitySet.empty)(_ + _.symbol) val inlineTypeBindings = new TreeTypeMap( typeMap = new TypeMap() { - override def apply(tp: Type): Type = tp match { + override def apply(tp: Type): Type = tp match case tr: TypeRef if tr.prefix.eq(NoPrefix) && typeBindingsSet.contains(tr.symbol) => val TypeAlias(res) = tr.info: @unchecked res case tp => mapOver(tp) - } }, treeMap = { case ident: Ident if ident.isType && typeBindingsSet.contains(ident.symbol) => @@ -957,58 +920,50 @@ class Inliner(val call: tpd.Tree)(using Context): ) val Block(termBindings1, tree1) = inlineTypeBindings(Block(termBindings, tree)) dropUnusedDefs(termBindings1.asInstanceOf[List[ValOrDefDef]], tree1) - } - else { + else val refCount = MutableSymbolMap[Int]() val bindingOfSym = MutableSymbolMap[MemberDef]() - def isInlineable(binding: MemberDef) = binding match { + def isInlineable(binding: MemberDef) = binding match case ddef @ DefDef(_, Nil, _, _) => isElideableExpr(ddef.rhs) case vdef @ ValDef(_, _, _) => isElideableExpr(vdef.rhs) case _ => false - } - for (binding <- bindings if isInlineable(binding)) { + for (binding <- bindings if isInlineable(binding)) refCount(binding.symbol) = 0 bindingOfSym(binding.symbol) = binding - } def updateRefCount(sym: Symbol, inc: Int) = for (x <- refCount.get(sym)) refCount(sym) = x + inc def updateTermRefCounts(tree: Tree) = - tree.typeOpt.foreachPart { + tree.typeOpt.foreachPart: case ref: TermRef => updateRefCount(ref.symbol, 2) // can't be inlined, so make sure refCount is at 
least 2 case _ => - } def countRefs(tree: Tree) = - tree.foreachSubTree { + tree.foreachSubTree: case t: RefTree => updateRefCount(t.symbol, 1) updateTermRefCounts(t) case t @ (_: New | _: TypeTree) => updateTermRefCounts(t) case _ => - } countRefs(tree) for (binding <- bindings) countRefs(binding) def retain(boundSym: Symbol) = { - refCount.get(boundSym) match { + refCount.get(boundSym) match case Some(x) => x > 1 || x == 1 && !boundSym.is(Method) case none => true - } } && !boundSym.is(Inline) - val inlineBindings = new TreeMap { - override def transform(t: Tree)(using Context) = t match { + val inlineBindings = new TreeMap: + override def transform(t: Tree)(using Context) = t match case t: RefTree => val sym = t.symbol - val t1 = refCount.get(sym) match { + val t1 = refCount.get(sym) match case Some(1) => - bindingOfSym(sym) match { + bindingOfSym(sym) match case binding: ValOrDefDef => integrate(binding.rhs, sym) - } case none => t - } super.transform(t1) case t: Apply => val t1 = super.transform(t) @@ -1017,20 +972,15 @@ class Inliner(val call: tpd.Tree)(using Context): super.transform(expr) case _ => super.transform(t) - } - } val retained = bindings.filterConserve(binding => retain(binding.symbol)) if (retained `eq` bindings) (bindings, tree) - else { + else val expanded = inlineBindings.transform(tree) dropUnusedDefs(retained, expanded) - } - } - } - private def expandMacro(body: Tree, splicePos: SrcPos)(using Context) = { + private def expandMacro(body: Tree, splicePos: SrcPos)(using Context) = assert(level == 0) val inlinedFrom = enclosingInlineds.last val dependencies = macroDependencies(body)(using spliceContext) @@ -1044,19 +994,15 @@ class Inliner(val call: tpd.Tree)(using Context): if suspendable then ctx.compilationUnit.suspend() // this throws a SuspendException - val evaluatedSplice = inContext(quoted.MacroExpansion.context(inlinedFrom)) { + val evaluatedSplice = inContext(quoted.MacroExpansion.context(inlinedFrom)): Splicer.splice(body, 
splicePos, inlinedFrom.srcPos, MacroClassLoader.fromContext) - } - val inlinedNormailizer = new TreeMap { - override def transform(tree: tpd.Tree)(using Context): tpd.Tree = tree match { + val inlinedNormailizer = new TreeMap: + override def transform(tree: tpd.Tree)(using Context): tpd.Tree = tree match case Inlined(EmptyTree, Nil, expr) if enclosingInlineds.isEmpty => transform(expr) case _ => super.transform(tree) - } - } val normalizedSplice = inlinedNormailizer.transform(evaluatedSplice) if (normalizedSplice.isEmpty) normalizedSplice else normalizedSplice.withSpan(splicePos.span) - } /** Return the set of symbols that are referred at level -1 by the tree and defined in the current run. * This corresponds to the symbols that will need to be interpreted. @@ -1064,11 +1010,10 @@ class Inliner(val call: tpd.Tree)(using Context): private def macroDependencies(tree: Tree)(using Context) = new TreeAccumulator[List[Symbol]] { override def apply(syms: List[Symbol], tree: tpd.Tree)(using Context): List[Symbol] = - tree match { + tree match case tree: RefTree if tree.isTerm && level == -1 && tree.symbol.isDefinedInCurrentRun && !tree.symbol.isLocal => foldOver(tree.symbol :: syms, tree) case _: TypTree => syms case _ => foldOver(syms, tree) - } }.apply(Nil, tree) end Inliner diff --git a/compiler/src/dotty/tools/dotc/inlines/Inlines.scala b/compiler/src/dotty/tools/dotc/inlines/Inlines.scala index 36dc8a642afc..81fed97e4409 100644 --- a/compiler/src/dotty/tools/dotc/inlines/Inlines.scala +++ b/compiler/src/dotty/tools/dotc/inlines/Inlines.scala @@ -52,7 +52,7 @@ object Inlines: meth.is(Inline) && meth.hasAnnotation(defn.BodyAnnot) && !inInlineMethod /** Should call be inlined in this context? 
*/ - def needsInlining(tree: Tree)(using Context): Boolean = tree match { + def needsInlining(tree: Tree)(using Context): Boolean = tree match case Block(_, expr) => needsInlining(expr) case _ => isInlineable(tree.symbol) @@ -64,7 +64,6 @@ object Inlines: ) && !ctx.typer.hasInliningErrors && !ctx.base.stopInlining - } private def needsTransparentInlining(tree: Tree)(using Context): Boolean = tree.symbol.is(Transparent) @@ -113,7 +112,7 @@ object Inlines: * the `bindings` buffer. This is done as an optimization to keep * inline call expansions smaller. */ - def liftBindings(tree: Tree, liftPos: Tree => Tree): Tree = tree match { + def liftBindings(tree: Tree, liftPos: Tree => Tree): Tree = tree match case Block(stats, expr) => bindings ++= stats.map(liftPos) liftBindings(expr, liftPos) @@ -137,7 +136,6 @@ object Inlines: cpy.Select(tree)(liftBindings(qual, liftPos), name) case _ => tree - } // assertAllPositioned(tree) // debug val tree1 = liftBindings(tree, identity) @@ -253,12 +251,10 @@ object Inlines: val curSource = ctx.compilationUnit.source // Tree copier that changes the source of all trees to `curSource` - val cpyWithNewSource = new TypedTreeCopier { + val cpyWithNewSource = new TypedTreeCopier: override protected def sourceFile(tree: tpd.Tree): SourceFile = curSource - override protected val untpdCpy: untpd.UntypedTreeCopier = new untpd.UntypedTreeCopier { + override protected val untpdCpy: untpd.UntypedTreeCopier = new untpd.UntypedTreeCopier: override protected def sourceFile(tree: untpd.Tree): SourceFile = curSource - } - } /** Removes all Inlined trees, replacing them with blocks. * Repositions all trees directly inside an inlined expansion of a non empty call to the position of the call. @@ -267,15 +263,15 @@ object Inlines: * Until we implement JSR-45, we cannot represent in output positions in other source files. * So, reposition inlined code from other files with the call position. 
*/ - class Reposition extends TreeMap(cpyWithNewSource) { + class Reposition extends TreeMap(cpyWithNewSource): - override def transform(tree: Tree)(using Context): Tree = { + override def transform(tree: Tree)(using Context): Tree = def fixSpan[T <: untpd.Tree](copied: T): T = copied.withSpan(if tree.source == curSource then tree.span else callSpan) def finalize(copied: untpd.Tree) = fixSpan(copied).withAttachmentsFrom(tree).withTypeUnchecked(tree.tpe) - inContext(ctx.withSource(curSource)) { + inContext(ctx.withSource(curSource)): tree match case tree: Ident => finalize(untpd.Ident(tree.name)(curSource)) case tree: Literal => finalize(untpd.Literal(tree.const)(curSource)) @@ -287,9 +283,6 @@ object Inlines: case tree: DefTree => super.transform(tree).setDefTree case EmptyTree => tree case _ => fixSpan(super.transform(tree)) - } - } - } (new Reposition).transform(tree) end reposition @@ -301,12 +294,11 @@ object Inlines: * The trace has enough info to completely reconstruct positions. * Note: For macros it returns a Select and for other inline methods it returns an Ident (this distinction is only temporary to be able to run YCheckPositions) */ - def inlineCallTrace(callSym: Symbol, pos: SourcePosition)(using Context): Tree = { + def inlineCallTrace(callSym: Symbol, pos: SourcePosition)(using Context): Tree = assert(ctx.source == pos.source) val topLevelCls = callSym.topLevelClass if (callSym.is(Macro)) ref(topLevelCls.owner).select(topLevelCls.name)(using ctx.withOwner(topLevelCls.owner)).withSpan(pos.span) else Ident(topLevelCls.typeRef).withSpan(pos.span) - } private object Intrinsics: import dotty.tools.dotc.reporting.Diagnostic.Error @@ -315,12 +307,11 @@ object Inlines: private def compileForErrors(tree: Tree)(using Context): List[(ErrorKind, Error)] = assert(tree.symbol == defn.CompiletimeTesting_typeChecks || tree.symbol == defn.CompiletimeTesting_typeCheckErrors) - def stripTyped(t: Tree): Tree = t match { + def stripTyped(t: Tree): Tree = t match case 
Typed(t2, _) => stripTyped(t2) case Block(Nil, t2) => stripTyped(t2) case Inlined(_, Nil, t2) => stripTyped(t2) case _ => t - } val Apply(_, codeArg :: Nil) = tree: @unchecked val codeArg1 = stripTyped(codeArg.underlying) @@ -328,10 +319,10 @@ object Inlines: if Inlines.isInlineable(codeArg1.symbol) then stripTyped(Inlines.inlineCall(codeArg1)) else codeArg1 - ConstFold(underlyingCodeArg).tpe.widenTermRefExpr match { + ConstFold(underlyingCodeArg).tpe.widenTermRefExpr match case ConstantType(Constant(code: String)) => val source2 = SourceFile.virtual("tasty-reflect", code) - inContext(ctx.fresh.setNewTyperState().setTyper(new Typer(ctx.nestingLevel + 1)).setSource(source2)) { + inContext(ctx.fresh.setNewTyperState().setTyper(new Typer(ctx.nestingLevel + 1)).setSource(source2)): val tree2 = new Parser(source2).block() if ctx.reporter.allErrors.nonEmpty then ctx.reporter.allErrors.map((ErrorKind.Parser, _)) @@ -346,11 +337,9 @@ object Inlines: case _ => case _ => ctx.reporter.allErrors.map((ErrorKind.Typer, _)) - } case t => report.error(em"argument to compileError must be a statically known String but was: $codeArg", codeArg1.srcPos) Nil - } private def packError(kind: ErrorKind, error: Error)(using Context): Tree = def lit(x: Any) = Literal(Constant(x)) @@ -410,35 +399,31 @@ object Inlines: // Special handling of `constValue[T]`, `constValueOpt[T], and summonInline[T]` if callTypeArgs.length == 1 then - if (inlinedMethod == defn.Compiletime_constValue) { + if (inlinedMethod == defn.Compiletime_constValue) val constVal = tryConstValue if constVal.isEmpty then val msg = em"not a constant type: ${callTypeArgs.head}; cannot take constValue" return ref(defn.Predef_undefined).withSpan(call.span).withType(ErrorType(msg)) else return constVal - } - else if (inlinedMethod == defn.Compiletime_constValueOpt) { + else if (inlinedMethod == defn.Compiletime_constValueOpt) val constVal = tryConstValue return ( if (constVal.isEmpty) ref(defn.NoneModule.termRef) else 
New(defn.SomeClass.typeRef.appliedTo(constVal.tpe), constVal :: Nil) ) - } - else if (inlinedMethod == defn.Compiletime_summonInline) { + else if (inlinedMethod == defn.Compiletime_summonInline) def searchImplicit(tpt: Tree) = val evTyper = new Typer(ctx.nestingLevel + 1) val evCtx = ctx.fresh.setTyper(evTyper) - inContext(evCtx) { + inContext(evCtx): val evidence = evTyper.inferImplicitArg(tpt.tpe, tpt.span) evidence.tpe match case fail: Implicits.SearchFailureType => errorTree(call, evTyper.missingArgMsg(evidence, tpt.tpe, "")) case _ => evidence - } return searchImplicit(callTypeArgs.head) - } end if val (bindings, expansion) = super.inlined(rhsToInline) diff --git a/compiler/src/dotty/tools/dotc/inlines/PrepareInlineable.scala b/compiler/src/dotty/tools/dotc/inlines/PrepareInlineable.scala index 060c8d21f390..d3bba2a2cb54 100644 --- a/compiler/src/dotty/tools/dotc/inlines/PrepareInlineable.scala +++ b/compiler/src/dotty/tools/dotc/inlines/PrepareInlineable.scala @@ -24,7 +24,7 @@ import config.Printers.inlining import util.Property import staging.StagingLevel -object PrepareInlineable { +object PrepareInlineable: import tpd._ private val InlineAccessorsKey = new Property.Key[InlineAccessors] @@ -40,20 +40,19 @@ object PrepareInlineable { case Some(inlineAccessors) => inlineAccessors.addAccessorDefs(cls, body) case _ => body - class InlineAccessors extends AccessProxies { + class InlineAccessors extends AccessProxies: /** If an inline accessor name wraps a unique inline name, this is taken as indication * that the inline accessor takes its receiver as first parameter. Such accessors * are created by MakeInlineablePassing. */ - override def passReceiverAsArg(name: Name)(using Context): Boolean = name match { + override def passReceiverAsArg(name: Name)(using Context): Boolean = name match case InlineAccessorName(UniqueInlineName(_, _)) => true case _ => false - } /** A tree map which inserts accessors for non-public term members accessed from inlined code. 
*/ - abstract class MakeInlineableMap(val inlineSym: Symbol) extends TreeMap with Insert { + abstract class MakeInlineableMap(val inlineSym: Symbol) extends TreeMap with Insert: def accessorNameOf(name: TermName, site: Symbol)(using Context): TermName = val accName = InlineAccessorName(name) if site.isExtensibleClass then accName.expandedName(site) else accName @@ -78,35 +77,30 @@ object PrepareInlineable { def preTransform(tree: Tree)(using Context): Tree - def postTransform(tree: Tree)(using Context): Tree = tree match { + def postTransform(tree: Tree)(using Context): Tree = tree match case Assign(lhs, rhs) if lhs.symbol.name.is(InlineAccessorName) => cpy.Apply(tree)(useSetter(lhs), rhs :: Nil) case _ => tree - } override def transform(tree: Tree)(using Context): Tree = postTransform(super.transform(preTransform(tree))) - } /** Direct approach: place the accessor with the accessed symbol. This has the * advantage that we can re-use the receiver as is. But it is only * possible if the receiver is essentially this or an outer this, which is indicated * by the test that we can find a host for the accessor. 
*/ - class MakeInlineableDirect(inlineSym: Symbol) extends MakeInlineableMap(inlineSym) { - def preTransform(tree: Tree)(using Context): Tree = tree match { + class MakeInlineableDirect(inlineSym: Symbol) extends MakeInlineableMap(inlineSym): + def preTransform(tree: Tree)(using Context): Tree = tree match case tree: RefTree if needsAccessor(tree.symbol) => - if (tree.symbol.isConstructor) { + if (tree.symbol.isConstructor) report.error("Implementation restriction: cannot use private constructors in inline methods", tree.srcPos) tree // TODO: create a proper accessor for the private constructor - } else useAccessor(tree) case _ => tree - } override def ifNoHost(reference: RefTree)(using Context): Tree = reference - } /** Fallback approach if the direct approach does not work: Place the accessor method * in the same class as the inline method, and let it take the receiver as parameter. @@ -137,9 +131,9 @@ object PrepareInlineable { * Since different calls might have different receiver types, we need to generate one * such accessor per call, so they need to have unique names. 
*/ - class MakeInlineablePassing(inlineSym: Symbol) extends MakeInlineableMap(inlineSym) { + class MakeInlineablePassing(inlineSym: Symbol) extends MakeInlineableMap(inlineSym): - def preTransform(tree: Tree)(using Context): Tree = tree match { + def preTransform(tree: Tree)(using Context): Tree = tree match case _: Apply | _: TypeApply | _: RefTree if needsAccessor(tree.symbol) && tree.isTerm && !tree.symbol.isConstructor => val refPart = funPart(tree) @@ -149,9 +143,8 @@ object PrepareInlineable { // Need to dealias in order to catch all possible references to abstracted over types in // substitutions - val dealiasMap = new TypeMap { + val dealiasMap = new TypeMap: def apply(t: Type) = mapOver(t.dealias) - } val qualType = dealiasMap(qual.tpe.widen) // The types that are local to the inline method, and that therefore have @@ -160,11 +153,10 @@ object PrepareInlineable { ref.isType && ref.symbol.isContainedIn(inlineSym)).toList // Add qualifier type as leading method argument to argument `tp` - def addQualType(tp: Type): Type = tp match { + def addQualType(tp: Type): Type = tp match case tp: PolyType => tp.derivedLambdaType(tp.paramNames, tp.paramInfos, addQualType(tp.resultType)) case tp: ExprType => addQualType(tp.resultType) case tp => MethodType(qualType.simplified :: Nil, tp) - } // Abstract accessed type over local refs def abstractQualType(mtpe: Type): Type = @@ -202,8 +194,6 @@ object PrepareInlineable { tree case _ => tree - } - } /** Adds accessors for all non-public term members accessed * from `tree`. Non-public type members are currently left as they are. @@ -213,7 +203,7 @@ object PrepareInlineable { * @return If there are accessors generated, a thicket consisting of the rewritten `tree` * and all accessors, otherwise the original tree. 
*/ - def makeInlineable(tree: Tree)(using Context): Tree = { + def makeInlineable(tree: Tree)(using Context): Tree = val inlineSym = ctx.owner if (inlineSym.owner.isTerm) // Inlineable methods in local scopes can only be called in the scope they are defined, @@ -222,8 +212,6 @@ object PrepareInlineable { else new MakeInlineablePassing(inlineSym).transform( new MakeInlineableDirect(inlineSym).transform(tree)) - } - } def isLocalOrParam(sym: Symbol, inlineMethod: Symbol)(using Context): Boolean = sym.isContainedIn(inlineMethod) && sym != inlineMethod @@ -257,11 +245,11 @@ object PrepareInlineable { */ def registerInlineInfo( inlined: Symbol, treeExpr: Context ?=> Tree)(using Context): Unit = - inlined.unforcedAnnotation(defn.BodyAnnot) match { + inlined.unforcedAnnotation(defn.BodyAnnot) match case Some(ann: ConcreteBodyAnnotation) => case Some(ann: LazyBodyAnnotation) if ann.isEvaluated || ann.isEvaluating => case _ => - if (!ctx.isAfterTyper) { + if (!ctx.isAfterTyper) val inlineCtx = ctx inlined.updateAnnotation(LazyBodyAnnotation { given ctx: Context = inlineCtx @@ -273,16 +261,14 @@ object PrepareInlineable { inlining.println(i"Body to inline for $inlined: $inlinedBody") inlinedBody }) - } - } - private def checkInlineMethod(inlined: Symbol, body: Tree)(using Context): body.type = { + private def checkInlineMethod(inlined: Symbol, body: Tree)(using Context): body.type = if Inlines.inInlineMethod(using ctx.outer) then report.error(em"Implementation restriction: nested inline methods are not supported", inlined.srcPos) - if (inlined.is(Macro) && !ctx.isAfterTyper) { + if (inlined.is(Macro) && !ctx.isAfterTyper) - def checkMacro(tree: Tree): Unit = tree match { + def checkMacro(tree: Tree): Unit = tree match case Splice(code) => if (code.symbol.flags.is(Inline)) report.error("Macro cannot be implemented with an `inline` method", code.srcPos) @@ -308,9 +294,5 @@ object PrepareInlineable { | * The contents of the splice must call a static method | * All arguments 
must be quoted """.stripMargin, inlined.srcPos) - } checkMacro(body) - } body - } -} diff --git a/compiler/src/dotty/tools/dotc/interactive/Completion.scala b/compiler/src/dotty/tools/dotc/interactive/Completion.scala index e4d0cce9f6f9..f433839db43f 100644 --- a/compiler/src/dotty/tools/dotc/interactive/Completion.scala +++ b/compiler/src/dotty/tools/dotc/interactive/Completion.scala @@ -37,7 +37,7 @@ import scala.util.control.NonFatal */ case class Completion(label: String, description: String, symbols: List[Symbol]) -object Completion { +object Completion: import dotty.tools.dotc.ast.tpd._ @@ -45,10 +45,9 @@ object Completion { * * @return offset and list of symbols for possible completions */ - def completions(pos: SourcePosition)(using Context): (Int, List[Completion]) = { + def completions(pos: SourcePosition)(using Context): (Int, List[Completion]) = val path = Interactive.pathTo(ctx.compilationUnit.tpdTree, pos.span) computeCompletions(pos, path)(using Interactive.contextOfPath(path).withPhase(Phases.typerPhase)) - } /** * Inspect `path` to determine what kinds of symbols should be considered. @@ -61,7 +60,7 @@ object Completion { * Otherwise, provide no completion suggestion. */ def completionMode(path: List[Tree], pos: SourcePosition): Mode = - path match { + path match case Ident(_) :: Import(_, _) :: _ => Mode.ImportOrExport case (ref: RefTree) :: _ => if (ref.name.isTermName) Mode.Term @@ -74,7 +73,6 @@ object Completion { case (_: ImportOrExport) :: _ => Mode.ImportOrExport case _ => Mode.None - } /** When dealing with in varios palces we check to see if they are * due to incomplete backticks. If so, we ensure we get the full prefix @@ -124,12 +122,11 @@ object Completion { /** Inspect `path` to determine the offset where the completion result should be inserted. 
*/ def completionOffset(path: List[Tree]): Int = - path match { + path match case (ref: RefTree) :: _ => ref.span.point case _ => 0 - } - private def computeCompletions(pos: SourcePosition, path: List[Tree])(using Context): (Int, List[Completion]) = { + private def computeCompletions(pos: SourcePosition, path: List[Tree])(using Context): (Int, List[Completion]) = val mode = completionMode(path, pos) val rawPrefix = completionPrefix(path, pos) @@ -138,7 +135,7 @@ object Completion { val completer = new Completer(mode, prefix, pos) - val completions = path match { + val completions = path match // Ignore synthetic select from `This` because in code it was `Ident` // See example in dotty.tools.languageserver.CompletionTest.syntheticThis case Select(qual @ This(_), _) :: _ if qual.span.isSynthetic => completer.scopeCompletions @@ -147,7 +144,6 @@ object Completion { case (tree: ImportOrExport) :: _ => completer.directMemberCompletions(tree.expr) case (_: untpd.ImportSelector) :: Import(expr, _) :: _ => completer.directMemberCompletions(expr) case _ => completer.scopeCompletions - } val describedCompletions = describeCompletions(completions) val backtickedCompletions = @@ -161,7 +157,6 @@ object Completion { | type = ${completer.mode.is(Mode.Type)} | results = $backtickedCompletions%, %""") (offset, backtickedCompletions) - } def backtickCompletions(completion: Completion, hasBackTick: Boolean) = if hasBackTick || needsBacktick(completion.label) then @@ -216,7 +211,7 @@ object Completion { * For the results of all `xyzCompletions` methods term names and type names are always treated as different keys in the same map * and they never conflict with each other. 
*/ - class Completer(val mode: Mode, val prefix: String, pos: SourcePosition) { + class Completer(val mode: Mode, val prefix: String, pos: SourcePosition): /** Completions for terms and types that are currently in scope: * the members of the current class, local definitions and the symbols that have been imported, * recursively adding completions from outer scopes. @@ -230,7 +225,7 @@ object Completion { * (even if the import follows it syntactically) * - a more deeply nested import shadowing a member or a local definition causes an ambiguity */ - def scopeCompletions(using context: Context): CompletionMap = { + def scopeCompletions(using context: Context): CompletionMap = val mappings = collection.mutable.Map.empty[Name, List[ScopedDenotations]].withDefaultValue(List.empty) def addMapping(name: Name, denots: ScopedDenotations) = mappings(name) = mappings(name) :+ denots @@ -283,7 +278,7 @@ object Completion { catch case NonFatal(_) => false - denotss.find(!_.ctx.isImportContext) match { + denotss.find(!_.ctx.isImportContext) match // most deeply nested member or local definition if not shadowed by an import case Some(local) if local.ctx.scope == first.ctx.scope => resultMappings += name -> local.denots @@ -291,18 +286,15 @@ object Completion { case None if isSingleImport || isImportedInDifferentScope || isSameSymbolImportedDouble => resultMappings += name -> first.denots case None if isJavaLangAndScala => - denotss.foreach{ + denotss.foreach: denots => if isScalaPackage(denots) then resultMappings += name -> denots.denots - } case _ => - } } resultMappings - } /** Widen only those types which are applied or are exactly nothing */ @@ -335,7 +327,7 @@ object Completion { /** Completions introduced by imports directly in this context. * Completions from outer contexts are not included. 
*/ - private def importedCompletions(using Context): CompletionMap = { + private def importedCompletions(using Context): CompletionMap = val imp = ctx.importInfo def fromImport(name: Name, nameInScope: Name): Seq[(Name, SingleDenotation)] = @@ -370,7 +362,6 @@ object Completion { }.toSeq.groupByName givenImports ++ wildcardMembers ++ explicitMembers - } /** Completions from implicit conversions including old style extensions using implicit classes */ private def implicitConversionMemberCompletions(qual: Tree)(using Context): CompletionMap = @@ -412,9 +403,8 @@ object Completion { // 1. The extension method is visible under a simple name, by being defined or inherited or imported in a scope enclosing the reference. val termCompleter = new Completer(Mode.Term, prefix, pos) - val extMethodsInScope = termCompleter.scopeCompletions.toList.flatMap { + val extMethodsInScope = termCompleter.scopeCompletions.toList.flatMap: case (name, denots) => denots.collect { case d: SymDenotation if d.isTerm => (d.termRef, name.asTermName) } - } // 2. The extension method is a member of some given instance that is visible at the point of the reference. 
val givensInScope = ctx.implicits.eligible(defn.AnyType).map(_.implicitRef.underlyingRef) @@ -429,13 +419,12 @@ object Completion { val extMethodsFromGivensInImplicitScope = extractMemberExtensionMethods(givensInImplicitScope) val availableExtMethods = extMethodsFromGivensInImplicitScope ++ extMethodsFromImplicitScope ++ extMethodsFromGivensInScope ++ extMethodsInScope - val extMethodsWithAppliedReceiver = availableExtMethods.flatMap { + val extMethodsWithAppliedReceiver = availableExtMethods.flatMap: case (termRef, termName) => if termRef.symbol.is(ExtensionMethod) && !qual.tpe.isBottomType then tryApplyingReceiverToExtension(termRef) .map(denot => termName -> denot) else None - } extMethodsWithAppliedReceiver.groupByName /** Include in completion sets only symbols that @@ -483,7 +472,7 @@ object Completion { /** @param site The type to inspect. * @return The members of `site` that are accessible and pass the include filter. */ - private def accessibleMembers(site: Type)(using Context): Seq[SingleDenotation] = { + private def accessibleMembers(site: Type)(using Context): Seq[SingleDenotation] = def appendMemberSyms(name: Name, buf: mutable.Buffer[SingleDenotation]): Unit = try val member = site.member(name) @@ -494,14 +483,12 @@ object Completion { catch case ex: TypeError => - val members = site.memberDenots(completionsFilter, appendMemberSyms).collect { + val members = site.memberDenots(completionsFilter, appendMemberSyms).collect: case mbr if include(mbr, mbr.name) && mbr.symbol.isAccessibleFrom(site) => mbr - } val refinements = extractRefinements(site).filter(mbr => include(mbr, mbr.name)) members ++ refinements - } /** * Given `qual` of type T, finds all the types S such that there exists an implicit conversion @@ -510,21 +497,19 @@ object Completion { * @param qual The argument to which the implicit conversion should be applied. * @return The set of types after `qual` implicit conversion. 
*/ - private def implicitConversionTargets(qual: Tree)(using Context): Set[Type] = { + private def implicitConversionTargets(qual: Tree)(using Context): Set[Type] = val typer = ctx.typer val conversions = new typer.ImplicitSearch(defn.AnyType, qual, pos.span).allImplicits val targets = conversions.map(_.tree.tpe) interactiv.println(i"implicit conversion targets considered: ${targets.toList}%, %") targets - } /** Filter for names that should appear when looking for completions. */ - private object completionsFilter extends NameFilter { + private object completionsFilter extends NameFilter: def apply(pre: Type, name: Name)(using Context): Boolean = !name.isConstructorName && name.toTermName.info.kind == SimpleNameKind def isStable = true - } extension (denotations: Seq[SingleDenotation]) def groupByName(using Context): CompletionMap = denotations.groupBy(_.name) @@ -532,7 +517,6 @@ object Completion { extension [N <: Name](namedDenotations: Seq[(N, SingleDenotation)]) @annotation.targetName("groupByNameTupled") def groupByName: CompletionMap = namedDenotations.groupMap((name, denot) => name)((name, denot) => denot) - } private type CompletionMap = Map[Name, Seq[SingleDenotation]] @@ -545,11 +529,10 @@ object Completion { * The completion mode: defines what kinds of symbols should be included in the completion * results. 
*/ - class Mode(val bits: Int) extends AnyVal { + class Mode(val bits: Int) extends AnyVal: def is(other: Mode): Boolean = (bits & other.bits) == other.bits def |(other: Mode): Mode = new Mode(bits | other.bits) - } - object Mode { + object Mode: /** No symbol should be included */ val None: Mode = new Mode(0) @@ -561,6 +544,4 @@ object Completion { /** Both term and type symbols are allowed */ val ImportOrExport: Mode = new Mode(4) | Term | Type - } -} diff --git a/compiler/src/dotty/tools/dotc/interactive/Interactive.scala b/compiler/src/dotty/tools/dotc/interactive/Interactive.scala index fd6d426f39bb..c62c4b90b436 100644 --- a/compiler/src/dotty/tools/dotc/interactive/Interactive.scala +++ b/compiler/src/dotty/tools/dotc/interactive/Interactive.scala @@ -17,11 +17,11 @@ import util.Spans._, util.SourceFile, util.SourcePosition * * @see `InteractiveDriver` to get typed trees from code. */ -object Interactive { +object Interactive: import ast.tpd._ - object Include { - case class Set private[Include] (val bits: Int) extends AnyVal { + object Include: + case class Set private[Include] (val bits: Int) extends AnyVal: def | (that: Set): Set = Set(bits | that.bits) def except(that: Set): Set = Set(bits & ~that.bits) @@ -33,7 +33,6 @@ object Interactive { def isLinkedClass: Boolean = (bits & linkedClass.bits) != 0 def isImports: Boolean = (bits & imports.bits) != 0 def isLocal: Boolean = (bits & local.bits) != 0 - } /** The empty set */ val empty: Set = Set(0) @@ -61,18 +60,16 @@ object Interactive { /** All the flags */ val all: Set = Set(~0) - } /** Does this tree define a symbol ? */ def isDefinition(tree: Tree): Boolean = tree.isInstanceOf[NamedDefTree] /** The type of the closest enclosing tree with a type containing position `pos`. 
*/ - def enclosingType(trees: List[SourceTree], pos: SourcePosition)(using Context): Type = { + def enclosingType(trees: List[SourceTree], pos: SourcePosition)(using Context): Type = val path = pathTo(trees, pos) if (path.isEmpty) NoType else path.head.tpe - } /** The closest enclosing tree with a symbol containing position `pos`, or the `EmptyTree`. */ @@ -95,8 +92,8 @@ object Interactive { * * @see sourceSymbol */ - def enclosingSourceSymbols(path: List[Tree], pos: SourcePosition)(using Context): List[Symbol] = { - val syms = path match { + def enclosingSourceSymbols(path: List[Tree], pos: SourcePosition)(using Context): List[Symbol] = + val syms = path match // For a named arg, find the target `DefDef` and jump to the param case NamedArg(name, _) :: Apply(fn, _) :: _ => val funSym = fn.symbol @@ -104,7 +101,7 @@ object Interactive { && funSym.is(Synthetic) && funSym.owner.is(CaseClass)) List(funSym.owner.info.member(name).symbol) - else { + else val classTree = funSym.topLevelClass.asClass.rootTree val paramSymbol = for { @@ -113,7 +110,6 @@ object Interactive { } yield param.symbol List(paramSymbol.getOrElse(fn.symbol)) - } // For constructor calls, return the `` that was selected case _ :: (_: New) :: (select: Select) :: _ => @@ -127,10 +123,8 @@ object Interactive { case _ => List(enclosingTree(path).symbol) - } syms.map(_.sourceSymbol).filter(_.exists) - } /** Check if `tree` matches `sym`. * This is the case if the symbol defined by `tree` equals `sym`, @@ -138,7 +132,7 @@ object Interactive { * or `include` is `overridden`, and `tree` is overridden by `sym`, * or `include` is `overriding`, and `tree` overrides `sym`. 
*/ - def matchSymbol(tree: Tree, sym: Symbol, include: Include.Set)(using Context): Boolean = { + def matchSymbol(tree: Tree, sym: Symbol, include: Include.Set)(using Context): Boolean = def overrides(sym1: Symbol, sym2: Symbol) = sym1.owner.derivesFrom(sym2.owner) && sym1.overriddenSymbol(sym2.owner.asClass) == sym2 @@ -150,7 +144,6 @@ object Interactive { || include.isOverriding && overrides(tree.symbol, sym) ) ) - } /** Find named trees with a non-empty position whose symbol match `sym` in `trees`. * @@ -159,7 +152,7 @@ object Interactive { * source code. */ def namedTrees(trees: List[SourceTree], include: Include.Set, sym: Symbol) - (using Context): List[SourceTree] = + (using Context): List[SourceTree] = if (!sym.exists) Nil else @@ -175,12 +168,12 @@ object Interactive { def namedTrees(trees: List[SourceTree], include: Include.Set, treePredicate: NameTree => Boolean = util.common.alwaysTrue - )(using Context): List[SourceTree] = safely { + )(using Context): List[SourceTree] = safely: val buf = new mutable.ListBuffer[SourceTree] def traverser(source: SourceFile) = - new untpd.TreeTraverser { - private def handle(utree: untpd.NameTree): Unit = { + new untpd.TreeTraverser: + private def handle(utree: untpd.NameTree): Unit = val tree = utree.asInstanceOf[tpd.NameTree] if (tree.symbol.exists && tree.name != StdNames.nme.ERROR @@ -191,9 +184,8 @@ object Interactive { && (include.isReferences || isDefinition(tree)) && treePredicate(tree)) buf += SourceTree(tree, source) - } override def traverse(tree: untpd.Tree)(using Context) = - tree match { + tree match case imp: untpd.Import if include.isImports && tree.hasType => val tree = imp.asInstanceOf[tpd.Import] val selections = tpd.importSelections(tree) @@ -209,13 +201,10 @@ object Interactive { traverse(tree.call) case _ => traverseChildren(tree) - } - } trees.foreach(t => traverser(t.source).traverse(t.tree)) buf.toList - } /** * Find trees that match `symbol` in `trees`. 
@@ -229,7 +218,7 @@ object Interactive { includes: Include.Set, symbol: Symbol, predicate: NameTree => Boolean = util.common.alwaysTrue - )(using Context): List[SourceTree] = { + )(using Context): List[SourceTree] = val linkedSym = symbol.linkedClass val fullPredicate: NameTree => Boolean = tree => ( (includes.isDefinitions || !Interactive.isDefinition(tree)) @@ -242,7 +231,6 @@ object Interactive { && predicate(tree) ) namedTrees(trees, includes, fullPredicate) - } /** The reverse path to the node that closest encloses position `pos`, * or `Nil` if no such path exists. If a non-empty path is returned it starts with @@ -266,7 +254,7 @@ object Interactive { .dropWhile(!_.hasType).asInstanceOf[List[tpd.Tree]] else Nil - def contextOfStat(stats: List[Tree], stat: Tree, exprOwner: Symbol, ctx: Context): Context = stats match { + def contextOfStat(stats: List[Tree], stat: Tree, exprOwner: Symbol, ctx: Context): Context = stats match case Nil => ctx case first :: _ if first eq stat => @@ -275,14 +263,13 @@ object Interactive { contextOfStat(rest, stat, exprOwner, ctx.importContext(imp, inContext(ctx){imp.symbol})) case _ :: rest => contextOfStat(rest, stat, exprOwner, ctx) - } - def contextOfPath(path: List[Tree])(using Context): Context = path match { + def contextOfPath(path: List[Tree])(using Context): Context = path match case Nil | _ :: Nil => ctx.fresh case nested :: encl :: rest => val outer = contextOfPath(encl :: rest) - try encl match { + try encl match case tree @ PackageDef(pkg, stats) => assert(tree.symbol.exists) if (nested `eq` pkg) outer @@ -301,28 +288,23 @@ object Interactive { outer case tree @ Block(stats, expr) => val localCtx = outer.fresh.setNewScope - stats.foreach { + stats.foreach: case stat: MemberDef => localCtx.enter(stat.symbol) case _ => - } contextOfStat(stats, nested, ctx.owner, localCtx) case tree @ CaseDef(pat, _, _) => val localCtx = outer.fresh.setNewScope - pat.foreachSubTree { + pat.foreachSubTree: case bind: Bind => 
localCtx.enter(bind.symbol) case _ => - } localCtx case tree @ Template(constr, _, self, _) => if ((constr :: self :: tree.parentsOrDerived).contains(nested)) outer else contextOfStat(tree.body, nested, tree.symbol, outer.inClassContext(self.symbol)) case _ => outer - } - catch { + catch case ex: CyclicReference => outer - } - } /** The first tree in the path that is a definition. */ def enclosingDefinitionInPath(path: List[Tree])(using Context): Tree = @@ -336,14 +318,13 @@ object Interactive { * @param driver The driver responsible for `path`. * @return The definitions for the symbol at the end of `path`. */ - def findDefinitions(path: List[Tree], pos: SourcePosition, driver: InteractiveDriver): List[SourceTree] = { + def findDefinitions(path: List[Tree], pos: SourcePosition, driver: InteractiveDriver): List[SourceTree] = given Context = driver.currentCtx val enclTree = enclosingTree(path) val includeOverridden = enclTree.isInstanceOf[MemberDef] val symbols = enclosingSourceSymbols(path, pos) val includeExternal = symbols.exists(!_.isLocal) findDefinitions(symbols, driver, includeOverridden, includeExternal) - } /** * Find the definitions of `symbols`. @@ -358,7 +339,7 @@ object Interactive { def findDefinitions(symbols: List[Symbol], driver: InteractiveDriver, includeOverridden: Boolean, - includeExternal: Boolean): List[SourceTree] = { + includeExternal: Boolean): List[SourceTree] = given Context = driver.currentCtx val include = Include.definitions | Include.overriding | (if (includeOverridden) Include.overridden else Include.empty) @@ -370,7 +351,6 @@ object Interactive { else driver.sourceTreesContaining(name) findTreesMatching(trees, include | includeLocal, sym) } - } /** * Given `sym`, originating from `sourceDriver`, find its representation in @@ -381,25 +361,21 @@ object Interactive { * @param targetDriver The driver in which we want to get a representation of `symbol`. * @return A representation of `symbol` in `targetDriver`. 
*/ - def localize(symbol: Symbol, sourceDriver: InteractiveDriver, targetDriver: InteractiveDriver): Symbol = { + def localize(symbol: Symbol, sourceDriver: InteractiveDriver, targetDriver: InteractiveDriver): Symbol = def in[T](driver: InteractiveDriver)(fn: Context ?=> T): T = fn(using driver.currentCtx) if (sourceDriver == targetDriver) symbol - else { - val owners = in(sourceDriver) { + else + val owners = in(sourceDriver): symbol.ownersIterator.toList.reverse.map(_.name) - } - in(targetDriver) { + in(targetDriver): val base: Symbol = defn.RootClass owners.tail.foldLeft(base) { (prefix, symbolName) => if (prefix.exists) prefix.info.member(symbolName).symbol else NoSymbol } - } - } - } /** * Return a predicate function that determines whether a given `NameTree` is an implementation of @@ -409,19 +385,17 @@ object Interactive { * @return A function that determines whether a `NameTree` is an implementation of `sym`. */ def implementationFilter(sym: Symbol)(using Context): NameTree => Boolean = - if (sym.isClass) { + if (sym.isClass) case td: TypeDef => val treeSym = td.symbol (treeSym != sym || !treeSym.isOneOf(AbstractOrTrait)) && treeSym.derivesFrom(sym) case _ => false - } - else { + else case md: MemberDef => matchSymbol(md, sym, Include.overriding) && !md.symbol.is(Deferred) case _ => false - } /** * Is this tree using a renaming introduced by an import statement or an alias for `this`? @@ -430,10 +404,9 @@ object Interactive { * @return True, if this tree's name is different than its symbol's name, indicating that * it uses a renaming introduced by an import statement or an alias for `this`. */ - def isRenamed(tree: NameTree)(using Context): Boolean = { + def isRenamed(tree: NameTree)(using Context): Boolean = val symbol = tree.symbol symbol.exists && !sameName(tree.name, symbol.name) - } /** Are the two names the same? 
*/ def sameName(n0: Name, n1: Name): Boolean = @@ -441,5 +414,4 @@ object Interactive { private[interactive] def safely[T](op: => List[T]): List[T] = try op catch { case ex: TypeError => Nil } -} diff --git a/compiler/src/dotty/tools/dotc/interactive/InteractiveCompiler.scala b/compiler/src/dotty/tools/dotc/interactive/InteractiveCompiler.scala index 38a93125a342..d037f2c7d5c6 100644 --- a/compiler/src/dotty/tools/dotc/interactive/InteractiveCompiler.scala +++ b/compiler/src/dotty/tools/dotc/interactive/InteractiveCompiler.scala @@ -7,7 +7,7 @@ import Phases._ import parsing._ import typer._ -class InteractiveCompiler extends Compiler { +class InteractiveCompiler extends Compiler: // TODO: Figure out what phases should be run in IDEs // More phases increase latency but allow us to report more errors. // This could be improved by reporting errors back to the IDE @@ -18,4 +18,3 @@ class InteractiveCompiler extends Compiler { List(new transform.SetRootTree), List(new transform.CookComments) ) -} diff --git a/compiler/src/dotty/tools/dotc/interactive/InteractiveDriver.scala b/compiler/src/dotty/tools/dotc/interactive/InteractiveDriver.scala index 132ff162be61..c4d7f2afec91 100644 --- a/compiler/src/dotty/tools/dotc/interactive/InteractiveDriver.scala +++ b/compiler/src/dotty/tools/dotc/interactive/InteractiveDriver.scala @@ -25,12 +25,12 @@ import reporting._ import util._ /** A Driver subclass designed to be used from IDEs */ -class InteractiveDriver(val settings: List[String]) extends Driver { +class InteractiveDriver(val settings: List[String]) extends Driver: import tpd._ override def sourcesRequired: Boolean = false - private val myInitCtx: Context = { + private val myInitCtx: Context = val rootCtx = initCtx.fresh.addMode(Mode.ReadPositions).addMode(Mode.Interactive) rootCtx.setSetting(rootCtx.settings.YretainTrees, true) rootCtx.setSetting(rootCtx.settings.YcookComments, true) @@ -40,21 +40,18 @@ class InteractiveDriver(val settings: List[String]) extends Driver 
{ case None => rootCtx ctx.initialize()(using ctx) ctx - } private var myCtx: Context = myInitCtx def currentCtx: Context = myCtx private val compiler: Compiler = new InteractiveCompiler - private val myOpenedFiles = new mutable.LinkedHashMap[URI, SourceFile] { + private val myOpenedFiles = new mutable.LinkedHashMap[URI, SourceFile]: override def default(key: URI) = NoSource - } def openedFiles: Map[URI, SourceFile] = myOpenedFiles - private val myOpenedTrees = new mutable.LinkedHashMap[URI, List[SourceTree]] { + private val myOpenedTrees = new mutable.LinkedHashMap[URI, List[SourceTree]]: override def default(key: URI) = Nil - } def openedTrees: Map[URI, List[SourceTree]] = myOpenedTrees private val myCompilationUnits = new mutable.LinkedHashMap[URI, CompilationUnit] @@ -69,7 +66,7 @@ class InteractiveDriver(val settings: List[String]) extends Driver { // We also need something like sbt server-mode to be informed of changes on // the classpath. - private val (zipClassPaths, dirClassPaths) = currentCtx.platform.classPath(using currentCtx) match { + private val (zipClassPaths, dirClassPaths) = currentCtx.platform.classPath(using currentCtx) match case AggregateClassPath(cps) => // FIXME: We shouldn't assume that ClassPath doesn't have other // subclasses. For now, the only other subclass is JrtClassPath on Java @@ -81,15 +78,13 @@ class InteractiveDriver(val settings: List[String]) extends Driver { (zipCps, dirCps) case _ => (Seq(), Seq()) - } // Like in `ZipArchiveFileLookup` we assume that zips are immutable - private val zipClassPathClasses: Seq[TypeName] = { + private val zipClassPathClasses: Seq[TypeName] = val names = new mutable.ListBuffer[TypeName] for (cp <- zipClassPaths) classesFromZip(cp.zipFile, names) names - } initialize() @@ -107,9 +102,9 @@ class InteractiveDriver(val settings: List[String]) extends Driver { * This includes the trees for the buffers that are presently open in the IDE, and the trees * from the target directory. 
*/ - def sourceTreesContaining(id: String)(using Context): List[SourceTree] = { + def sourceTreesContaining(id: String)(using Context): List[SourceTree] = val fromBuffers = openedTrees.values.flatten.toList - val fromCompilationOutput = { + val fromCompilationOutput = val classNames = new mutable.ListBuffer[TypeName] val output = ctx.settings.outputDir.value if (output.isDirectory) @@ -119,9 +114,7 @@ class InteractiveDriver(val settings: List[String]) extends Driver { classNames.flatMap { cls => treesFromClassName(cls, id) } - } (fromBuffers ++ fromCompilationOutput).distinct - } /** * All the trees for this project. @@ -137,21 +130,20 @@ class InteractiveDriver(val settings: List[String]) extends Driver { * This includes the trees of the sources of this project, along with the trees that are found * on this project's classpath. */ - def allTreesContaining(id: String)(using Context): List[SourceTree] = { + def allTreesContaining(id: String)(using Context): List[SourceTree] = val fromSource = openedTrees.values.flatten.toList val fromClassPath = (dirClassPathClasses ++ zipClassPathClasses).flatMap { cls => treesFromClassName(cls, id) } (fromSource ++ fromClassPath).distinct - } def run(uri: URI, sourceCode: String): List[Diagnostic] = run(uri, toSource(uri, sourceCode)) - def run(uri: URI, source: SourceFile): List[Diagnostic] = { + def run(uri: URI, source: SourceFile): List[Diagnostic] = import typer.ImportInfo._ val previousCtx = myCtx - try { + try val reporter = new StoreReporter(null) with UniqueMessagePositions with HideNonSensicalMessages @@ -173,88 +165,74 @@ class InteractiveDriver(val settings: List[String]) extends Driver { myCtx = myCtx.fresh.setPhase(myInitCtx.base.typerPhase) reporter.removeBufferedMessages - } - catch { + catch case ex: FatalError => myCtx = previousCtx close(uri) Nil - } - } - def close(uri: URI): Unit = { + def close(uri: URI): Unit = myOpenedFiles.remove(uri) myOpenedTrees.remove(uri) myCompilationUnits.remove(uri) - } /** * The 
`SourceTree`s that define the class `className` and/or module `className`. * * @see SourceTree.fromSymbol */ - private def treesFromClassName(className: TypeName, id: String)(using Context): List[SourceTree] = { - def trees(className: TypeName, id: String): List[SourceTree] = { + private def treesFromClassName(className: TypeName, id: String)(using Context): List[SourceTree] = + def trees(className: TypeName, id: String): List[SourceTree] = val clsd = staticRef(className) - clsd match { + clsd match case clsd: ClassDenotation => clsd.ensureCompleted() SourceTree.fromSymbol(clsd.symbol.asClass, id) case _ => Nil - } - } trees(className, id) ::: trees(className.moduleClassName, id) - } // FIXME: classfiles in directories may change at any point, so we retraverse // the directories each time, if we knew when classfiles changed (sbt // server-mode might help here), we could do cache invalidation instead. - private def dirClassPathClasses: Seq[TypeName] = { + private def dirClassPathClasses: Seq[TypeName] = val names = new mutable.ListBuffer[TypeName] dirClassPaths.foreach { dirCp => val root = dirCp.dir.toPath classesFromDir(root, names) } names - } /** Adds the names of the classes that are defined in `file` to `buffer`. */ - private def classesFromZip(file: File, buffer: mutable.ListBuffer[TypeName]): Unit = { + private def classesFromZip(file: File, buffer: mutable.ListBuffer[TypeName]): Unit = val zipFile = new ZipFile(file) - try { + try val entries = zipFile.entries() - while (entries.hasMoreElements) { + while (entries.hasMoreElements) val entry = entries.nextElement() val name = entry.getName if name.endsWith(tastySuffix) then buffer += name.replace("/", ".").stripSuffix(tastySuffix).toTypeName - } - } finally zipFile.close() - } /** Adds the names of the classes that are defined in `dir` to `buffer`. 
*/ private def classesFromDir(dir: Path, buffer: mutable.ListBuffer[TypeName]): Unit = try Files.walkFileTree(dir, new SimpleFileVisitor[Path] { - override def visitFile(path: Path, attrs: BasicFileAttributes) = { - if (!attrs.isDirectory) { + override def visitFile(path: Path, attrs: BasicFileAttributes) = + if (!attrs.isDirectory) val name = path.getFileName.toString if name.endsWith(tastySuffix) then buffer += dir.relativize(path).toString.replace("/", ".").stripSuffix(tastySuffix).toTypeName - } FileVisitResult.CONTINUE - } }) - catch { + catch case _: NoSuchFileException => - } - private def topLevelTrees(topTree: Tree, source: SourceFile): List[SourceTree] = { + private def topLevelTrees(topTree: Tree, source: SourceFile): List[SourceTree] = val trees = new mutable.ListBuffer[SourceTree] - def addTrees(tree: Tree): Unit = tree match { + def addTrees(tree: Tree): Unit = tree match case PackageDef(_, stats) => stats.foreach(addTrees) case imp: Import => @@ -262,23 +240,21 @@ class InteractiveDriver(val settings: List[String]) extends Driver { case tree: TypeDef => trees += SourceTree(tree, source) case _ => - } addTrees(topTree) trees.toList - } /** Remove attachments and error out completers. The goal is to avoid * having a completer hanging in a typed tree which can capture the context * of a previous run. Note that typed trees can have untyped or partially * typed children if the source contains errors. */ - private def cleanup(tree: tpd.Tree)(using Context): Unit = { + private def cleanup(tree: tpd.Tree)(using Context): Unit = val seen = mutable.Set.empty[tpd.Tree] - def cleanupTree(tree: tpd.Tree): Unit = { + def cleanupTree(tree: tpd.Tree): Unit = seen += tree tree.foreachSubTree { t => - if (t.symbol.exists && t.hasType) { + if (t.symbol.exists && t.hasType) if (!t.symbol.isCompleted) t.symbol.info = UnspecifiedErrorType t.symbol.annotations.foreach { annot => /* In some cases annotations are are used on themself (possibly larger cycles). 
@@ -290,12 +266,9 @@ class InteractiveDriver(val settings: List[String]) extends Driver { if (annot.isEvaluated && !seen(annot.tree)) cleanupTree(annot.tree) } - } t.removeAllAttachments() } - } cleanupTree(tree) - } private def toSource(uri: URI, sourceCode: String): SourceFile = SourceFile.virtual(Paths.get(uri).toString, sourceCode) @@ -308,15 +281,13 @@ class InteractiveDriver(val settings: List[String]) extends Driver { * this compiler). In those cases, an un-initialized compiler may crash (for instance if * late-compilation is needed). */ - private def initialize(): Unit = { + private def initialize(): Unit = val run = compiler.newRun(using myInitCtx.fresh) myCtx = run.runContext run.compileUnits(Nil, myCtx) - } -} -object InteractiveDriver { +object InteractiveDriver: def toUriOption(file: AbstractFile): Option[URI] = if (!file.exists) None @@ -328,13 +299,11 @@ object InteractiveDriver { // AbstractFile#toUri method and implement it by returning a constant // passed as a parameter to a constructor of VirtualFile Some(Paths.get(file.path).toUri) - catch { + catch case e: InvalidPathException => None - } def toUriOption(source: SourceFile): Option[URI] = if (!source.exists) None else toUriOption(source.file) -} diff --git a/compiler/src/dotty/tools/dotc/interactive/SourceTree.scala b/compiler/src/dotty/tools/dotc/interactive/SourceTree.scala index 60f01396e91e..2a988a8d2bb8 100644 --- a/compiler/src/dotty/tools/dotc/interactive/SourceTree.scala +++ b/compiler/src/dotty/tools/dotc/interactive/SourceTree.scala @@ -13,25 +13,24 @@ import util._, util.Spans._ * * `tree` can be either an `Import` or a `NameTree`. 
*/ -case class SourceTree(tree: tpd.Import | tpd.NameTree, source: SourceFile) { +case class SourceTree(tree: tpd.Import | tpd.NameTree, source: SourceFile): /** The position of `tree` */ final def pos(using Context): SourcePosition = source.atSpan(tree.span) /** The position of the name in `tree` */ - def namePos(using Context): SourcePosition = tree match { + def namePos(using Context): SourcePosition = tree match case tree: tpd.NameTree => // FIXME: Merge with NameTree#namePos ? val treeSpan = tree.span if (treeSpan.isZeroExtent || tree.name.toTermName == nme.ERROR) NoSourcePosition - else { + else // Constructors are named `` in the trees, but `this` in the source. - val nameLength = tree.name match { + val nameLength = tree.name match case nme.CONSTRUCTOR => nme.this_.toString.length case other => other.stripModuleClassSuffix.show.toString.length - } - val position = { + val position = // FIXME: This is incorrect in some cases, like with backquoted identifiers, // see https://github.com/lampepfl/dotty/pull/1634#issuecomment-257079436 val (start, end) = @@ -41,40 +40,31 @@ case class SourceTree(tree: tpd.Import | tpd.NameTree, source: SourceFile) { // If we don't have a point, we need to find it (treeSpan.end - nameLength, treeSpan.end) Span(start, end, start) - } source.atSpan(position) - } case _ => NoSourcePosition - } -} -object SourceTree { +object SourceTree: def fromSymbol(sym: ClassSymbol, id: String = "")(using Context): List[SourceTree] = if (sym == defn.SourceFileAnnot || // FIXME: No SourceFile annotation on SourceFile itself !sym.source.exists) // FIXME: We cannot deal with external projects yet Nil - else { + else import ast.Trees._ - def sourceTreeOfClass(tree: tpd.Tree): Option[SourceTree] = tree match { + def sourceTreeOfClass(tree: tpd.Tree): Option[SourceTree] = tree match case PackageDef(_, stats) => stats.flatMap(sourceTreeOfClass).headOption case tree: tpd.TypeDef if tree.symbol == sym => Some(SourceTree(tree, sym.source)) case _ 
=> None - } - def sourceImports(tree: tpd.Tree, sourceFile: SourceFile): List[SourceTree] = tree match { + def sourceImports(tree: tpd.Tree, sourceFile: SourceFile): List[SourceTree] = tree match case PackageDef(_, stats) => stats.flatMap(sourceImports(_, sourceFile)) case imp: tpd.Import => SourceTree(imp, sourceFile) :: Nil case _ => Nil - } val tree = sym.rootTreeContaining(id) - sourceTreeOfClass(tree) match { + sourceTreeOfClass(tree) match case Some(namedTree) => namedTree :: sourceImports(tree, namedTree.source) case None => Nil - } - } -} diff --git a/compiler/src/dotty/tools/dotc/parsing/CharArrayReader.scala b/compiler/src/dotty/tools/dotc/parsing/CharArrayReader.scala index c63409d0d52b..0c6bd673a501 100644 --- a/compiler/src/dotty/tools/dotc/parsing/CharArrayReader.scala +++ b/compiler/src/dotty/tools/dotc/parsing/CharArrayReader.scala @@ -33,19 +33,17 @@ abstract class CharArrayReader { self => def isUnicodeEscape: Boolean = charOffset == lastUnicodeOffset /** Advance one character; reducing CR;LF pairs to just LF */ - final def nextChar(): Unit = { + final def nextChar(): Unit = val idx = charOffset lastCharOffset = idx charOffset = idx + 1 if (idx >= buf.length) ch = SU - else { + else val c = buf(idx) ch = c if (c == '\\') potentialUnicode() else if (c < ' ') { skipCR(); potentialLineEnd() } - } - } def getc(): Char = { nextChar() ; ch } @@ -53,41 +51,36 @@ abstract class CharArrayReader { self => * This is for use in multi-line strings, so there are no * "potential line ends" here. 
*/ - final def nextRawChar(): Unit = { + final def nextRawChar(): Unit = val idx = charOffset lastCharOffset = idx charOffset = idx + 1 if (idx >= buf.length) ch = SU - else { + else val c = buf(idx) ch = c if (c == '\\') potentialUnicode() - } - } /** Interpret \\uxxxx escapes */ - private def potentialUnicode(): Unit = { - def evenSlashPrefix: Boolean = { + private def potentialUnicode(): Unit = + def evenSlashPrefix: Boolean = var p = charOffset - 2 while (p >= 0 && buf(p) == '\\') p -= 1 (charOffset - p) % 2 == 0 - } def udigit: Int = - if (charOffset >= buf.length) { + if (charOffset >= buf.length) // Since the positioning code is very insistent about throwing exceptions, // we have to decrement the position so our error message can be seen, since // we are one past EOF. This happens with e.g. val x = \ u 1 error("incomplete unicode escape", charOffset - 1) SU - } - else { + else val d = digit2int(buf(charOffset), 16) if (d >= 0) charOffset += 1 else error("error in unicode escape", charOffset) d - } - if (charOffset < buf.length && buf(charOffset) == 'u' && decodeUni && evenSlashPrefix) { + if (charOffset < buf.length && buf(charOffset) == 'u' && decodeUni && evenSlashPrefix) while ({ charOffset += 1 charOffset < buf.length && buf(charOffset) == 'u' @@ -96,16 +89,13 @@ abstract class CharArrayReader { self => val code = udigit << 12 | udigit << 8 | udigit << 4 | udigit lastUnicodeOffset = charOffset ch = code.toChar - } - } /** replace CR;LF by LF */ private def skipCR(): Unit = if (ch == CR) - if (charOffset < buf.length && buf(charOffset) == LF) { + if (charOffset < buf.length && buf(charOffset) == LF) charOffset += 1 ch = LF - } /** Handle line ends */ private def potentialLineEnd(): Unit = @@ -118,11 +108,10 @@ abstract class CharArrayReader { self => def lookaheadChar(): Char = lookaheadReader().getc() - class CharArrayLookaheadReader extends CharArrayReader { + class CharArrayLookaheadReader extends CharArrayReader: val buf: Array[Char] = self.buf 
charOffset = self.charOffset ch = self.ch override def decodeUni: Boolean = self.decodeUni def error(msg: String, offset: Int): Unit = self.error(msg, offset) - } } diff --git a/compiler/src/dotty/tools/dotc/parsing/JavaParsers.scala b/compiler/src/dotty/tools/dotc/parsing/JavaParsers.scala index 6ec896dcb200..a08b6f84fae3 100644 --- a/compiler/src/dotty/tools/dotc/parsing/JavaParsers.scala +++ b/compiler/src/dotty/tools/dotc/parsing/JavaParsers.scala @@ -23,11 +23,11 @@ import util.Spans._ import scala.collection.mutable.{ListBuffer, LinkedHashMap} -object JavaParsers { +object JavaParsers: import ast.untpd._ - class JavaParser(source: SourceFile)(using Context) extends ParserCommon(source) { + class JavaParser(source: SourceFile)(using Context) extends ParserCommon(source): val definitions: Definitions = ctx.definitions import definitions._ @@ -40,19 +40,18 @@ object JavaParsers { /** This is the general parse entry point. * Overridden by ScriptParser */ - def parse(): Tree = { + def parse(): Tree = val t = compilationUnit() accept(EOF) t - } // -------- error handling --------------------------------------- - protected def skip(): Unit = { + protected def skip(): Unit = var nparens = 0 var nbraces = 0 - while (true) { - in.token match { + while (true) + in.token match case EOF => return case SEMI => @@ -67,23 +66,18 @@ object JavaParsers { case LBRACE => nbraces += 1 case _ => - } in.nextToken() - } - } def syntaxError(msg: Message, skipIt: Boolean): Unit = syntaxError(in.offset, msg, skipIt) - def syntaxError(offset: Int, msg: Message, skipIt: Boolean): Unit = { - if (offset > lastErrorOffset) { + def syntaxError(offset: Int, msg: Message, skipIt: Boolean): Unit = + if (offset > lastErrorOffset) syntaxError(msg, offset) // no more errors on this token. 
lastErrorOffset = in.offset - } if (skipIt) skip() - } def errorTypeTree: TypeTree = TypeTree().withType(UnspecifiedErrorType).withSpan(Span(in.offset)) @@ -106,66 +100,58 @@ object JavaParsers { def arrayOf(tpt: Tree): AppliedTypeTree = AppliedTypeTree(scalaDot(tpnme.Array), List(tpt)) - def makeTemplate(parents: List[Tree], stats: List[Tree], tparams: List[TypeDef], needsDummyConstr: Boolean): Template = { - def pullOutFirstConstr(stats: List[Tree]): (Tree, List[Tree]) = stats match { + def makeTemplate(parents: List[Tree], stats: List[Tree], tparams: List[TypeDef], needsDummyConstr: Boolean): Template = + def pullOutFirstConstr(stats: List[Tree]): (Tree, List[Tree]) = stats match case (meth: DefDef) :: rest if meth.name == nme.CONSTRUCTOR => (meth, rest) case first :: rest => val (constr, tail) = pullOutFirstConstr(rest) (constr, first :: tail) case nil => (EmptyTree, nil) - } var (constr1, stats1) = pullOutFirstConstr(stats) // A dummy first constructor is needed for Java classes so that the real constructors see the // import of the companion object. The constructor has parameter of type Unit so no Java code // can call it. // This also avoids clashes between the constructor parameter names and member names. 
- if (needsDummyConstr) { + if (needsDummyConstr) if (constr1 == EmptyTree) constr1 = makeConstructor(List(), Nil) stats1 = constr1 :: stats1 constr1 = makeConstructor(List(scalaDot(tpnme.Unit)), tparams, Flags.JavaDefined | Flags.PrivateLocal) - } - else if (constr1 == EmptyTree) { + else if (constr1 == EmptyTree) constr1 = makeConstructor(List(), tparams) - } Template(constr1.asInstanceOf[DefDef], parents, Nil, EmptyValDef, stats1) - } def makeSyntheticParam(count: Int, tpt: Tree): ValDef = makeParam(nme.syntheticParamName(count), tpt) def makeParam(name: TermName, tpt: Tree): ValDef = ValDef(name, tpt, EmptyTree).withMods(Modifiers(Flags.JavaDefined | Flags.Param)) - def makeConstructor(formals: List[Tree], tparams: List[TypeDef], flags: FlagSet = Flags.JavaDefined): DefDef = { + def makeConstructor(formals: List[Tree], tparams: List[TypeDef], flags: FlagSet = Flags.JavaDefined): DefDef = val vparams = formals.zipWithIndex.map { case (p, i) => makeSyntheticParam(i + 1, p) } DefDef(nme.CONSTRUCTOR, joinParams(tparams, List(vparams)), TypeTree(), EmptyTree).withMods(Modifiers(flags)) - } // ------------- general parsing --------------------------- /** skip parent or brace enclosed sequence of things */ - def skipAhead(): Unit = { + def skipAhead(): Unit = var nparens = 0 var nbraces = 0 while ({ - in.token match { + in.token match case LPAREN => nparens += 1 case LBRACE => nbraces += 1 case _ => - } in.nextToken() - in.token match { + in.token match case RPAREN => nparens -= 1 case RBRACE => nbraces -= 1 case _ => - } in.token != EOF && (nparens > 0 || nbraces > 0) }) () - } def skipTo(tokens: Int*): Unit = while (!(tokens contains in.token) && in.token != EOF) @@ -178,94 +164,80 @@ object JavaParsers { * * @return The offset at the start of the token to accept */ - def accept(token: Int): Int = { + def accept(token: Int): Int = val offset = in.offset - if (in.token != token) { + if (in.token != token) val offsetToReport = in.offset val msg = 
em"${tokenString(token)} expected but ${tokenString(in.token)} found." syntaxError(offsetToReport, msg, skipIt = true) - } if (in.token == token) in.nextToken() offset - } - def acceptClosingAngle(): Unit = { - val closers: PartialFunction[Int, Int] = { + def acceptClosingAngle(): Unit = + val closers: PartialFunction[Int, Int] = case GTGTGTEQ => GTGTEQ case GTGTGT => GTGT case GTGTEQ => GTEQ case GTGT => GT case GTEQ => EQUALS - } if (closers isDefinedAt in.token) in.token = closers(in.token) else accept(GT) - } def identForType(): TypeName = ident().toTypeName def ident(): Name = - if (in.token == IDENTIFIER) { + if (in.token == IDENTIFIER) val name = in.name in.nextToken() name - } - else { + else accept(IDENTIFIER) nme.ERROR - } - def repsep[T <: Tree](p: () => T, sep: Int): List[T] = { + def repsep[T <: Tree](p: () => T, sep: Int): List[T] = val buf = ListBuffer[T](p()) - while (in.token == sep) { + while (in.token == sep) in.nextToken() buf += p() - } buf.toList - } /** Convert (qual)ident to type identifier */ - def convertToTypeId(tree: Tree): Tree = convertToTypeName(tree) match { + def convertToTypeId(tree: Tree): Tree = convertToTypeName(tree) match case Some(t) => t.withSpan(tree.span) - case _ => tree match { - case AppliedTypeTree(_, _) | Select(_, _) => - tree - case _ => - syntaxError(IdentifierExpected(tree.show), tree.span) - errorTypeTree - } - } + case _ => tree match + case AppliedTypeTree(_, _) | Select(_, _) => + tree + case _ => + syntaxError(IdentifierExpected(tree.show), tree.span) + errorTypeTree /** Translate names in Select/Ident nodes to type names. 
*/ - def convertToTypeName(tree: Tree): Option[RefTree] = tree match { + def convertToTypeName(tree: Tree): Option[RefTree] = tree match case Select(qual, name) => Some(Select(qual, name.toTypeName)) case Ident(name) => Some(Ident(name.toTypeName)) case _ => None - } // -------------------- specific parsing routines ------------------ - def qualId(): RefTree = { + def qualId(): RefTree = var t: RefTree = atSpan(in.offset) { Ident(ident()) } - while (in.token == DOT && in.lookaheadToken == IDENTIFIER) { + while (in.token == DOT && in.lookaheadToken == IDENTIFIER) in.nextToken() t = atSpan(t.span.start, in.offset) { Select(t, ident()) } - } t - } def optArrayBrackets(tpt: Tree): Tree = - if (in.token == LBRACKET) { + if (in.token == LBRACKET) val tpt1 = atSpan(tpt.span.start, in.offset) { arrayOf(tpt) } in.nextToken() accept(RBRACKET) optArrayBrackets(tpt1) - } else tpt def basicType(): Tree = - atSpan(in.offset) { - in.token match { + atSpan(in.offset): + in.token match case BYTE => in.nextToken(); TypeTree(ByteType) case SHORT => in.nextToken(); TypeTree(ShortType) case CHAR => in.nextToken(); TypeTree(CharType) @@ -275,40 +247,34 @@ object JavaParsers { case DOUBLE => in.nextToken(); TypeTree(DoubleType) case BOOLEAN => in.nextToken(); TypeTree(BooleanType) case _ => syntaxError(em"illegal start of type", skipIt = true); errorTypeTree - } - } def typ(): Tree = annotations() - optArrayBrackets { + optArrayBrackets: if (in.token == FINAL) in.nextToken() - if (in.token == IDENTIFIER) { + if (in.token == IDENTIFIER) var t = typeArgs(atSpan(in.offset)(Ident(ident()))) // typeSelect generates Select nodes if the lhs is an Ident or Select, // For other nodes it always assumes that the selected item is a type. 
- def typeSelect(t: Tree, name: Name) = t match { + def typeSelect(t: Tree, name: Name) = t match case Ident(_) | Select(_, _) => Select(t, name) case _ => Select(t, name.toTypeName) - } - while (in.token == DOT) { + while (in.token == DOT) in.nextToken() t = typeArgs(atSpan(t.span.start, in.offset)(typeSelect(t, ident()))) - } convertToTypeId(t) - } else basicType() - } - def typeArgs(t: Tree): Tree = { + def typeArgs(t: Tree): Tree = var wildnum = 0 def typeArg(): Tree = - if (in.token == QMARK) { + if (in.token == QMARK) val offset = in.offset in.nextToken() val hi = if (in.token == EXTENDS) { in.nextToken() ; typ() } else javaLangObject() val lo = if (in.token == SUPER) { in.nextToken() ; typ() } else EmptyTree - atSpan(offset) { + atSpan(offset): /* TypeDef( Modifiers(Flags.JavaDefined | Flags.Deferred), @@ -317,33 +283,25 @@ object JavaParsers { TypeBoundsTree(lo, hi)) */ TypeBoundsTree(lo, hi) - } - } else typ() - if (in.token == LT) { + if (in.token == LT) in.nextToken() val t1 = convertToTypeId(t) val args = repsep(() => typeArg(), COMMA) acceptClosingAngle() - atSpan(t1.span.start) { + atSpan(t1.span.start): AppliedTypeTree(t1, args) - } - } else t - } - def annotations(): List[Tree] = { + def annotations(): List[Tree] = var annots = new ListBuffer[Tree] - while (in.token == AT) { + while (in.token == AT) in.nextToken() - annotation() match { + annotation() match case Some(anno) => annots += anno case _ => - } - } annots.toList - } /** Annotation ::= TypeName [`(` [AnnotationArgument {`,` AnnotationArgument}] `)`] * AnnotationArgument ::= ElementValuePair | ELementValue @@ -360,7 +318,7 @@ object JavaParsers { * If we encounter expression that we cannot parse, we do not raise parsing error, * but instead we skip entire annotation silently. 
*/ - def annotation(): Option[Tree] = { + def annotation(): Option[Tree] = def classOrId(): Tree = val id = qualId() if in.lookaheadToken == CLASS then @@ -382,23 +340,20 @@ object JavaParsers { if in.token == COMMA then in.nextToken() // using this instead of repsep allows us to handle trailing commas accept(RBRACE) - Option.unless(buffer contains None) { + Option.unless(buffer contains None): Apply(scalaDot(nme.Array), buffer.flatten.toList) - } def argValue(): Option[Tree] = - val tree = tryConstant match { + val tree = tryConstant match case Some(c) => Some(atSpan(in.offset)(Literal(c))) - case _ => in.token match { - case AT => - in.nextToken() - annotation() - case IDENTIFIER => Some(classOrId()) - case LBRACE => array() - case _ => None - } - } + case _ => in.token match + case AT => + in.nextToken() + annotation() + case IDENTIFIER => Some(classOrId()) + case LBRACE => array() + case _ => None if in.token == COMMA || in.token == RBRACE || in.token == RPAREN then tree else @@ -426,33 +381,29 @@ object JavaParsers { args += annArg() accept(RPAREN) - Option.unless(args contains None) { + Option.unless(args contains None): Apply( Select(New(id), nme.CONSTRUCTOR), args.flatten.toList ) - } - } - def modifiers(inInterface: Boolean): Modifiers = { + def modifiers(inInterface: Boolean): Modifiers = var flags: FlagSet = Flags.JavaDefined // assumed true unless we see public/private/protected var isPackageAccess = true var annots = new ListBuffer[Tree] def addAnnot(tpt: Tree) = - annots += atSpan(in.offset) { + annots += atSpan(in.offset): in.nextToken() New(tpt) - } while (true) - in.token match { + in.token match case AT if (in.lookaheadToken != INTERFACE) => in.nextToken() - annotation() match { + annotation() match case Some(anno) => annots += anno case _ => - } case PUBLIC => isPackageAccess = false in.nextToken() @@ -489,74 +440,61 @@ object JavaParsers { else tpnme.EMPTY return Modifiers(flags, privateWithin) withAnnotations annots.toList - } assert(false, 
"should not be here") throw new RuntimeException - } def typeParams(flags: FlagSet = Flags.JavaDefined | Flags.PrivateLocal | Flags.Param): List[TypeDef] = - if (in.token == LT) { + if (in.token == LT) in.nextToken() val tparams = repsep(() => typeParam(flags), COMMA) acceptClosingAngle() tparams - } else List() def typeParam(flags: FlagSet): TypeDef = - atSpan(in.offset) { + atSpan(in.offset): annotations() val name = identForType() val hi = if (in.token == EXTENDS) { in.nextToken() ; bound() } else javaLangObject() TypeDef(name, TypeBoundsTree(EmptyTree, hi)).withMods(Modifiers(flags)) - } def bound(): Tree = - atSpan(in.offset) { + atSpan(in.offset): val buf = ListBuffer[Tree](typ()) - while (in.token == AMP) { + while (in.token == AMP) in.nextToken() buf += typ() - } val ts = buf.toList if (ts.tail.isEmpty) ts.head else ts.reduce(makeAndType(_,_)) - } - def formalParams(): List[ValDef] = { + def formalParams(): List[ValDef] = accept(LPAREN) val vparams = if (in.token == RPAREN) List() else repsep(() => formalParam(), COMMA) accept(RPAREN) vparams - } - def formalParam(): ValDef = { + def formalParam(): ValDef = val start = in.offset if (in.token == FINAL) in.nextToken() annotations() var t = typ() - if (in.token == DOTDOTDOT) { + if (in.token == DOTDOTDOT) in.nextToken() - t = atSpan(t.span.start) { + t = atSpan(t.span.start): PostfixOp(t, Ident(tpnme.raw.STAR)) - } - } - atSpan(start, in.offset) { + atSpan(start, in.offset): varDecl(Modifiers(Flags.JavaDefined | Flags.Param), t, ident().toTermName) - } - } def optThrows(): Unit = - if (in.token == THROWS) { + if (in.token == THROWS) in.nextToken() repsep(() => typ(), COMMA) - } - def methodBody(): Tree = atSpan(in.offset) { + def methodBody(): Tree = atSpan(in.offset): skipAhead() accept(RBRACE) // skip block unimplementedExpr - } def definesInterface(token: Int): Boolean = token == INTERFACE || token == AT @@ -568,33 +506,29 @@ object JavaParsers { if in.token == IDENTIFIER && in.name == jnme.RECORDid then 
in.token = RECORD - def termDecl(start: Offset, mods: Modifiers, parentToken: Int, parentTParams: List[TypeDef]): List[Tree] = { + def termDecl(start: Offset, mods: Modifiers, parentToken: Int, parentTParams: List[TypeDef]): List[Tree] = val inInterface = definesInterface(parentToken) val tparams = if (in.token == LT) typeParams(Flags.JavaDefined | Flags.Param) else List() val isVoid = in.token == VOID var rtpt = if (isVoid) - atSpan(in.offset) { + atSpan(in.offset): in.nextToken() TypeTree(UnitType) - } else typ() var nameOffset = in.offset - val rtptName = rtpt match { + val rtptName = rtpt match case Ident(name) => name case _ => nme.EMPTY - } - if (in.token == LPAREN && rtptName != nme.EMPTY && !inInterface) { + if (in.token == LPAREN && rtptName != nme.EMPTY && !inInterface) // constructor declaration val vparams = formalParams() optThrows() - List { - atSpan(start) { + List: + atSpan(start): DefDef(nme.CONSTRUCTOR, joinParams(tparams, List(vparams)), TypeTree(), methodBody()).withMods(mods) - } - } - } else if (in.token == LBRACE && rtptName != nme.EMPTY && parentToken == RECORD) { + else if (in.token == LBRACE && rtptName != nme.EMPTY && parentToken == RECORD) /* record RecordName(T param1, ...) 
{ RecordName { // <- here @@ -604,13 +538,12 @@ object JavaParsers { */ methodBody() Nil - } - else { + else var mods1 = mods if (mods.is(Flags.Abstract)) mods1 = mods &~ Flags.Abstract nameOffset = in.offset val name = ident() - if (in.token == LPAREN) { + if (in.token == LPAREN) // method declaration val vparams = formalParams() if (!isVoid) rtpt = optArrayBrackets(rtpt) @@ -620,36 +553,27 @@ object JavaParsers { if (bodyOk && in.token == LBRACE) methodBody() else - if (parentToken == AT && in.token == DEFAULT) { + if (parentToken == AT && in.token == DEFAULT) val annot = - atSpan(nameOffset) { + atSpan(nameOffset): New(Select(Select(scalaDot(nme.annotation), nme.internal), tpnme.AnnotationDefaultATTR), Nil) - } mods1 = mods1 withAddedAnnotation annot val unimplemented = unimplementedExpr skipTo(SEMI) accept(SEMI) unimplemented - } - else { + else accept(SEMI) EmptyTree - } //if (inInterface) mods1 |= Flags.Deferred - List { - atSpan(start, nameOffset) { + List: + atSpan(start, nameOffset): DefDef(name.toTermName, joinParams(tparams, List(vparams)), rtpt, body).withMods(mods1 | Flags.Method) - } - } - } - else { + else if (inInterface) mods1 |= Flags.Final | Flags.JavaStatic val result = fieldDecls(start, nameOffset, mods1, rtpt, name) accept(SEMI) result - } - } - } /** Parse a sequence of field declarations, separated by commas. * This one is tricky because a comma might also appear in an @@ -660,85 +584,70 @@ object JavaParsers { * Once we have reached the end of the statement, we know whether * these potential definitions are real or not. */ - def fieldDecls(start: Offset, firstNameOffset: Offset, mods: Modifiers, tpt: Tree, name: Name): List[Tree] = { + def fieldDecls(start: Offset, firstNameOffset: Offset, mods: Modifiers, tpt: Tree, name: Name): List[Tree] = val buf = ListBuffer[Tree]( atSpan(start, firstNameOffset) { varDecl(mods, tpt, name.toTermName) }) val maybe = new ListBuffer[Tree] // potential variable definitions. 
- while (in.token == COMMA) { + while (in.token == COMMA) in.nextToken() - if (in.token == IDENTIFIER) { // if there's an ident after the comma ... + if (in.token == IDENTIFIER) // if there's an ident after the comma ... val nextNameOffset = in.offset val name = ident() - if (in.token == EQUALS || in.token == SEMI) { // ... followed by a `=` or `;`, we know it's a real variable definition + if (in.token == EQUALS || in.token == SEMI) // ... followed by a `=` or `;`, we know it's a real variable definition buf ++= maybe buf += atSpan(start, nextNameOffset) { varDecl(mods, tpt, name.toTermName) } maybe.clear() - } else if (in.token == COMMA) // ... if there's a comma after the ident, it could be a real vardef or not. maybe += atSpan(start, nextNameOffset) { varDecl(mods, tpt, name.toTermName) } - else { // ... if there's something else we were still in the initializer of the + else // ... if there's something else we were still in the initializer of the // previous var def; skip to next comma or semicolon. skipTo(COMMA, SEMI) maybe.clear() - } - } - else { // ... if there's no ident following the comma we were still in the initializer of the + else // ... if there's no ident following the comma we were still in the initializer of the // previous var def; skip to next comma or semicolon. skipTo(COMMA, SEMI) maybe.clear() - } - } if (in.token == SEMI) buf ++= maybe // every potential vardef that survived until here is real. 
buf.toList - } - def varDecl(mods: Modifiers, tpt: Tree, name: TermName): ValDef = { + def varDecl(mods: Modifiers, tpt: Tree, name: TermName): ValDef = val tpt1 = optArrayBrackets(tpt) /** Tries to detect final static literals syntactically and returns a constant type replacement */ - def optConstantTpe(): Tree = { + def optConstantTpe(): Tree = def constantTpe(const: Constant): Tree = TypeTree(ConstantType(const)) - def forConst(const: Constant): Tree = { + def forConst(const: Constant): Tree = if (in.token != SEMI) tpt1 - else { - def isStringTyped = tpt1 match { + else + def isStringTyped = tpt1 match case Ident(n: TypeName) => "String" == n.toString case _ => false - } if (const.tag == Constants.StringTag && isStringTyped) constantTpe(const) - else tpt1 match { + else tpt1 match case TypedSplice(tpt2) => - if (const.tag == Constants.BooleanTag || const.isNumeric) { + if (const.tag == Constants.BooleanTag || const.isNumeric) //for example, literal 'a' is ok for float. 127 is ok for byte, but 128 is not. 
val converted = const.convertTo(tpt2.tpe) if (converted == null) tpt1 else constantTpe(converted) - } else tpt1 case _ => tpt1 - } - } - } in.nextToken() // EQUALS - if (mods.is(Flags.JavaStatic) && mods.is(Flags.Final)) { + if (mods.is(Flags.JavaStatic) && mods.is(Flags.Final)) tryConstant.map(forConst).getOrElse(tpt1) - } else tpt1 - } val tpt2: Tree = - if (in.token == EQUALS && !mods.is(Flags.Param)) { + if (in.token == EQUALS && !mods.is(Flags.Param)) val res = optConstantTpe() skipTo(COMMA, SEMI) res - } else tpt1 val mods1 = if (mods.is(Flags.Final)) mods else mods | Flags.Mutable ValDef(name, tpt2, if (mods.is(Flags.Param)) EmptyTree else unimplementedExpr).withMods(mods1) - } def memberDecl(start: Offset, mods: Modifiers, parentToken: Int, parentTParams: List[TypeDef]): List[Tree] = in.token match case CLASS | ENUM | RECORD | INTERFACE | AT => @@ -747,45 +656,40 @@ object JavaParsers { termDecl(start, mods, parentToken, parentTParams) def makeCompanionObject(cdef: TypeDef, statics: List[Tree]): Tree = - atSpan(cdef.span) { + atSpan(cdef.span): assert(cdef.span.exists) ModuleDef(cdef.name.toTermName, makeTemplate(List(), statics, List(), false)).withMods((cdef.mods & Flags.RetainedModuleClassFlags).toTermFlags) - } def addCompanionObject(statics: List[Tree], cdef: TypeDef): List[Tree] = List(makeCompanionObject(cdef, statics), cdef) - def importDecl(): List[Tree] = { + def importDecl(): List[Tree] = val start = in.offset accept(IMPORT) val buf = new ListBuffer[Name] def collectIdents() : Int = - if (in.token == ASTERISK) { + if (in.token == ASTERISK) val starOffset = in.offset in.nextToken() buf += nme.WILDCARD starOffset - } - else { + else val nameOffset = in.offset buf += ident() - if (in.token == DOT) { + if (in.token == DOT) in.nextToken() collectIdents() - } else nameOffset - } if (in.token == STATIC) in.nextToken() else buf += nme.ROOTPKG val lastnameOffset = collectIdents() accept(SEMI) val names = buf.toList - if (names.length < 2) { + if 
(names.length < 2) syntaxError(start, em"illegal import", skipIt = false) List() - } - else { + else val qual = names.tail.init.foldLeft(Ident(names.head): Tree)(Select(_, _)) val lastname = names.last val ident = Ident(lastname).withSpan(Span(lastnameOffset)) @@ -795,36 +699,30 @@ object JavaParsers { // } val imp = atSpan(start) { Import(qual, ImportSelector(ident) :: Nil) } imp :: Nil - } - } def interfacesOpt(): List[Tree] = - if (in.token == IMPLEMENTS) { + if (in.token == IMPLEMENTS) in.nextToken() repsep(() => typ(), COMMA) - } else List() - def classDecl(start: Offset, mods: Modifiers): List[Tree] = { + def classDecl(start: Offset, mods: Modifiers): List[Tree] = accept(CLASS) val nameOffset = in.offset val name = identForType() val tparams = typeParams() val superclass = - if (in.token == EXTENDS) { + if (in.token == EXTENDS) in.nextToken() typ() - } else javaLangObject() val interfaces = interfacesOpt() val (statics, body) = typeBody(CLASS, name, tparams) - val cls = atSpan(start, nameOffset) { + val cls = atSpan(start, nameOffset): TypeDef(name, makeTemplate(superclass :: interfaces, body, tparams, true)).withMods(mods) - } addCompanionObject(statics, cls) - } def recordDecl(start: Offset, mods: Modifiers): List[Tree] = accept(RECORD) @@ -858,7 +756,7 @@ object JavaParsers { .withMods(Modifiers(Flags.JavaDefined | Flags.Synthetic, mods.privateWithin)) // return the trees - val recordTypeDef = atSpan(start, nameOffset) { + val recordTypeDef = atSpan(start, nameOffset): TypeDef(name, makeTemplate( parents = superclass :: interfaces, @@ -867,52 +765,46 @@ object JavaParsers { true ) ).withMods(mods) - } addCompanionObject(statics, recordTypeDef) end recordDecl - def interfaceDecl(start: Offset, mods: Modifiers): List[Tree] = { + def interfaceDecl(start: Offset, mods: Modifiers): List[Tree] = accept(INTERFACE) val nameOffset = in.offset val name = identForType() val tparams = typeParams() val parents = - if (in.token == EXTENDS) { + if (in.token == EXTENDS) 
in.nextToken() repsep(() => typ(), COMMA) - } else List(javaLangObject()) val (statics, body) = typeBody(INTERFACE, name, tparams) - val iface = atSpan(start, nameOffset) { + val iface = atSpan(start, nameOffset): TypeDef( name, makeTemplate(parents, body, tparams, false)).withMods(mods | Flags.JavaInterface) - } addCompanionObject(statics, iface) - } - def typeBody(leadingToken: Int, parentName: Name, parentTParams: List[TypeDef]): (List[Tree], List[Tree]) = { + def typeBody(leadingToken: Int, parentName: Name, parentTParams: List[TypeDef]): (List[Tree], List[Tree]) = accept(LBRACE) val defs = typeBodyDecls(leadingToken, parentName, parentTParams) accept(RBRACE) defs - } - def typeBodyDecls(parentToken: Int, parentName: Name, parentTParams: List[TypeDef]): (List[Tree], List[Tree]) = { + def typeBodyDecls(parentToken: Int, parentName: Name, parentTParams: List[TypeDef]): (List[Tree], List[Tree]) = val inInterface = definesInterface(parentToken) val statics = new ListBuffer[Tree] val members = new ListBuffer[Tree] - while (in.token != RBRACE && in.token != EOF) { + while (in.token != RBRACE && in.token != EOF) val start = in.offset var mods = modifiers(inInterface) - if (in.token == LBRACE) { + if (in.token == LBRACE) skipAhead() // skip init block, we just assume we have seen only static accept(RBRACE) - } else if (in.token == SEMI) in.nextToken() - else { + else adaptRecordIdentifier() if (in.token == ENUM || in.token == RECORD || definesInterface(in.token)) mods |= Flags.JavaStatic val decls = memberDecl(start, mods, parentToken, parentTParams) @@ -920,34 +812,28 @@ object JavaParsers { statics else members) ++= decls - } - } (statics.toList, members.toList) - } def annotationParents: List[Tree] = List( javaLangObject(), Select(javaLangDot(nme.annotation), tpnme.Annotation) ) - def annotationDecl(start: Offset, mods: Modifiers): List[Tree] = { + def annotationDecl(start: Offset, mods: Modifiers): List[Tree] = accept(AT) accept(INTERFACE) val nameOffset = 
in.offset val name = identForType() val (statics, body) = typeBody(AT, name, List()) - val constructorParams = body.collect { + val constructorParams = body.collect: case dd: DefDef => makeParam(dd.name, dd.tpt) - } val constr = DefDef(nme.CONSTRUCTOR, List(constructorParams), TypeTree(), EmptyTree).withMods(Modifiers(Flags.JavaDefined)) val templ = makeTemplate(annotationParents, constr :: body, List(), true) - val annot = atSpan(start, nameOffset) { + val annot = atSpan(start, nameOffset): TypeDef(name, templ).withMods(mods | Flags.JavaInterface | Flags.JavaAnnotation) - } addCompanionObject(statics, annot) - } - def enumDecl(start: Offset, mods: Modifiers): List[Tree] = { + def enumDecl(start: Offset, mods: Modifiers): List[Tree] = accept(ENUM) val nameOffset = in.offset val name = identForType() @@ -956,20 +842,17 @@ object JavaParsers { accept(LBRACE) val buf = new ListBuffer[Tree] def parseEnumConsts(): Unit = - if (in.token != RBRACE && in.token != SEMI && in.token != EOF) { + if (in.token != RBRACE && in.token != SEMI && in.token != EOF) buf += enumConst(enumType) - if (in.token == COMMA) { + if (in.token == COMMA) in.nextToken() parseEnumConsts() - } - } parseEnumConsts() val consts = buf.toList val (statics, body) = - if (in.token == SEMI) { + if (in.token == SEMI) in.nextToken() typeBodyDecls(ENUM, name, List()) - } else (List(), List()) val predefs = List( @@ -990,30 +873,24 @@ object JavaParsers { */ val superclazz = Apply(TypeApply( Select(New(javaLangDot(tpnme.Enum)), nme.CONSTRUCTOR), List(enumType)), Nil) - val enumclazz = atSpan(start, nameOffset) { + val enumclazz = atSpan(start, nameOffset): TypeDef(name, makeTemplate(superclazz :: interfaces, body, List(), true)).withMods(mods | Flags.JavaEnumTrait) - } addCompanionObject(consts ::: statics ::: predefs, enumclazz) - } - def enumConst(enumType: Tree): ValDef = { + def enumConst(enumType: Tree): ValDef = annotations() - atSpan(in.offset) { + atSpan(in.offset): val name = ident() - if (in.token == 
LPAREN) { + if (in.token == LPAREN) // skip arguments skipAhead() accept(RPAREN) - } - if (in.token == LBRACE) { + if (in.token == LBRACE) // skip classbody skipAhead() accept(RBRACE) - } ValDef(name.toTermName, enumType, unimplementedExpr).withMods(Modifiers(Flags.JavaEnumTrait | Flags.StableRealizable | Flags.JavaDefined | Flags.JavaStatic)) - } - } def typeDecl(start: Offset, mods: Modifiers): List[Tree] = in.token match case ENUM => enumDecl(start, mods) @@ -1023,12 +900,11 @@ object JavaParsers { case RECORD => recordDecl(start, mods) case _ => in.nextToken(); syntaxError(em"illegal start of type declaration", skipIt = true); List(errorTypeTree) - def tryConstant: Option[Constant] = { - val negate = in.token match { + def tryConstant: Option[Constant] = + val negate = in.token match case MINUS | BANG => in.nextToken(); true case _ => false - } - val l = in.token match { + val l = in.token match case TRUE => !negate case FALSE => negate case CHARLIT => in.strVal.charAt(0) @@ -1038,51 +914,42 @@ object JavaParsers { case DOUBLELIT => in.floatVal(negate) case STRINGLIT => in.strVal case _ => null - } if (l == null) None - else { + else in.nextToken() Some(Constant(l)) - } - } /** CompilationUnit ::= [package QualId semi] TopStatSeq */ - def compilationUnit(): Tree = { + def compilationUnit(): Tree = val start = in.offset val pkg: RefTree = - if (in.token == AT || in.token == PACKAGE) { + if (in.token == AT || in.token == PACKAGE) annotations() accept(PACKAGE) val pkg = qualId() accept(SEMI) pkg - } else Ident(nme.EMPTY_PACKAGE) - thisPackageName = convertToTypeName(pkg) match { + thisPackageName = convertToTypeName(pkg) match case Some(t) => t.name.toTypeName case _ => tpnme.EMPTY - } val buf = new ListBuffer[Tree] while (in.token == IMPORT) buf ++= importDecl() - while (in.token != EOF && in.token != RBRACE) { + while (in.token != EOF && in.token != RBRACE) while (in.token == SEMI) in.nextToken() - if (in.token != EOF) { + if (in.token != EOF) val start = 
in.offset val mods = modifiers(inInterface = false) adaptRecordIdentifier() // needed for typeDecl buf ++= typeDecl(start, mods) - } - } val unit = atSpan(start) { PackageDef(pkg, buf.toList) } accept(EOF) unit match case PackageDef(Ident(nme.EMPTY_PACKAGE), Nil) => EmptyTree case _ => unit - } - } /** OutlineJavaParser parses top-level declarations in `source` to find declared classes, ignoring their bodies (which @@ -1090,11 +957,8 @@ object JavaParsers { * This is necessary even for Java, because the filename defining a non-public classes cannot be determined from the * classname alone. */ - class OutlineJavaParser(source: SourceFile)(using Context) extends JavaParser(source) with OutlineParserCommon { + class OutlineJavaParser(source: SourceFile)(using Context) extends JavaParser(source) with OutlineParserCommon: override def skipBracesHook(): Option[Tree] = None - override def typeBody(leadingToken: Int, parentName: Name, parentTParams: List[TypeDef]): (List[Tree], List[Tree]) = { + override def typeBody(leadingToken: Int, parentName: Name, parentTParams: List[TypeDef]): (List[Tree], List[Tree]) = skipBraces() (List(EmptyValDef), List(EmptyTree)) - } - } -} diff --git a/compiler/src/dotty/tools/dotc/parsing/JavaScanners.scala b/compiler/src/dotty/tools/dotc/parsing/JavaScanners.scala index d21d4b85b5df..dbc920ba35d6 100644 --- a/compiler/src/dotty/tools/dotc/parsing/JavaScanners.scala +++ b/compiler/src/dotty/tools/dotc/parsing/JavaScanners.scala @@ -12,16 +12,15 @@ import util.Chars._ import PartialFunction.cond import core.Decorators.em -object JavaScanners { +object JavaScanners: - class JavaScanner(source: SourceFile, override val startFrom: Offset = 0)(using Context) extends ScannerCommon(source) { + class JavaScanner(source: SourceFile, override val startFrom: Offset = 0)(using Context) extends ScannerCommon(source): override def decodeUni: Boolean = true - def toToken(name: SimpleName): Token = { + def toToken(name: SimpleName): Token = val idx = 
name.start if (idx >= 0 && idx <= lastKeywordStart) kwArray(idx) else IDENTIFIER - } private class JavaTokenData0 extends TokenData @@ -59,14 +58,14 @@ object JavaScanners { /** read next token */ - private def fetchToken(): Unit = { + private def fetchToken(): Unit = offset = charOffset - 1 - ch match { + ch match case ' ' | '\t' | CR | LF | FF => nextChar() fetchToken() case _ => - (ch: @switch) match { + (ch: @switch) match case 'A' | 'B' | 'C' | 'D' | 'E' | 'F' | 'G' | 'H' | 'I' | 'J' | 'K' | 'L' | 'M' | 'N' | 'O' | @@ -86,10 +85,9 @@ object JavaScanners { case '0' => putChar(ch) nextChar() - if (ch == 'x' || ch == 'X') { + if (ch == 'x' || ch == 'X') nextChar() base = 16 - } else base = 8 getNumber() @@ -122,69 +120,58 @@ object JavaScanners { case '\'' => nextChar() getlitch() - if (ch == '\'') { + if (ch == '\'') nextChar() token = CHARLIT setStrVal() - } else error(em"unclosed character literal") case '=' => token = EQUALS nextChar() - if (ch == '=') { + if (ch == '=') token = EQEQ nextChar() - } case '>' => token = GT nextChar() - if (ch == '=') { + if (ch == '=') token = GTEQ nextChar() - } - else if (ch == '>') { + else if (ch == '>') token = GTGT nextChar() - if (ch == '=') { + if (ch == '=') token = GTGTEQ nextChar() - } - else if (ch == '>') { + else if (ch == '>') token = GTGTGT nextChar() - if (ch == '=') { + if (ch == '=') token = GTGTGTEQ nextChar() - } - } - } case '<' => token = LT nextChar() - if (ch == '=') { + if (ch == '=') token = LTEQ nextChar() - } - else if (ch == '<') { + else if (ch == '<') token = LTLT nextChar() - if (ch == '=') { + if (ch == '=') token = LTLTEQ nextChar() - } - } case '!' 
=> token = BANG nextChar() - if (ch == '=') { + if (ch == '=') token = BANGEQ nextChar() - } case '~' => token = TILDE @@ -205,102 +192,86 @@ object JavaScanners { case '&' => token = AMP nextChar() - if (ch == '&') { + if (ch == '&') token = AMPAMP nextChar() - } - else if (ch == '=') { + else if (ch == '=') token = AMPEQ nextChar() - } case '|' => token = BAR nextChar() - if (ch == '|') { + if (ch == '|') token = BARBAR nextChar() - } - else if (ch == '=') { + else if (ch == '=') token = BAREQ nextChar() - } case '+' => token = PLUS nextChar() - if (ch == '+') { + if (ch == '+') token = PLUSPLUS nextChar() - } - else if (ch == '=') { + else if (ch == '=') token = PLUSEQ nextChar() - } case '-' => token = MINUS nextChar() - if (ch == '-') { + if (ch == '-') token = MINUSMINUS nextChar() - } - else if (ch == '=') { + else if (ch == '=') token = MINUSEQ nextChar() - } case '*' => token = ASTERISK nextChar() - if (ch == '=') { + if (ch == '=') token = ASTERISKEQ nextChar() - } case '/' => nextChar() - if (!skipComment()) { + if (!skipComment()) token = SLASH nextChar() - if (ch == '=') { + if (ch == '=') token = SLASHEQ nextChar() - } - } else fetchToken() case '^' => token = HAT nextChar() - if (ch == '=') { + if (ch == '=') token = HATEQ nextChar() - } case '%' => token = PERCENT nextChar() - if (ch == '=') { + if (ch == '=') token = PERCENTEQ nextChar() - } case '.' 
=> token = DOT nextChar() - if ('0' <= ch && ch <= '9') { + if ('0' <= ch && ch <= '9') putChar('.'); getFraction() - } - else if (ch == '.') { + else if (ch == '.') nextChar() - if (ch == '.') { + if (ch == '.') nextChar() token = DOTDOTDOT - } else error(em"`.` character expected") - } case ';' => token = SEMI @@ -336,47 +307,37 @@ object JavaScanners { case SU => if (isAtEnd) token = EOF - else { + else error(em"illegal character") nextChar() - } case _ => - if (Character.isUnicodeIdentifierStart(ch)) { + if (Character.isUnicodeIdentifierStart(ch)) putChar(ch) nextChar() getIdentRest() - } - else { + else error(em"illegal character: ${ch.toInt}") nextChar() - } - } - } - } - protected def skipComment(): Boolean = { - @tailrec def skipLineComment(): Unit = ch match { + protected def skipComment(): Boolean = + @tailrec def skipLineComment(): Unit = ch match case CR | LF | SU => case _ => nextChar(); skipLineComment() - } - @tailrec def skipJavaComment(): Unit = ch match { + @tailrec def skipJavaComment(): Unit = ch match case SU => incompleteInputError(em"unclosed comment") case '*' => nextChar(); if (ch == '/') nextChar() else skipJavaComment() case _ => nextChar(); skipJavaComment() - } - ch match { + ch match case '/' => nextChar(); skipLineComment(); true case '*' => nextChar(); skipJavaComment(); true case _ => false - } - } // Identifiers --------------------------------------------------------------- private def getIdentRest(): Unit = while (true) - (ch: @switch) match { + (ch: @switch) match case 'A' | 'B' | 'C' | 'D' | 'E' | 'F' | 'G' | 'H' | 'I' | 'J' | 'K' | 'L' | 'M' | 'N' | 'O' | @@ -403,15 +364,12 @@ object JavaScanners { finishNamed() return case _ => - if (Character.isUnicodeIdentifierPart(ch)) { + if (Character.isUnicodeIdentifierPart(ch)) putChar(ch) nextChar() - } - else { + else finishNamed() return - } - } // Literals ----------------------------------------------------------------- @@ -429,14 +387,12 @@ object JavaScanners { val leadch: Char 
= ch var oct: Int = digit2int(ch, 8) nextChar() - if ('0' <= ch && ch <= '7') { + if ('0' <= ch && ch <= '7') oct = oct * 8 + digit2int(ch, 8) nextChar() - if (leadch <= '3' && '0' <= ch && ch <= '7') { + if (leadch <= '3' && '0' <= ch && ch <= '7') oct = oct * 8 + digit2int(ch, 8) nextChar() - } - } oct.asInstanceOf[Char] end octal def greatEscape: Char = @@ -475,15 +431,13 @@ object JavaScanners { /** Read a triple-quote delimited text block, starting after the first three double quotes. */ - private def getTextBlock(): Unit = { + private def getTextBlock(): Unit = // Open delimiter is followed by optional space, then a newline - while (ch == ' ' || ch == '\t' || ch == FF) { + while (ch == ' ' || ch == '\t' || ch == FF) nextChar() - } - if (ch != LF && ch != CR) { // CR-LF is already normalized into LF by `JavaCharArrayReader` + if (ch != LF && ch != CR) // CR-LF is already normalized into LF by `JavaCharArrayReader` error(em"illegal text block open delimiter sequence, missing line terminator") return - } nextChar() /* Do a lookahead scan over the full text block to: @@ -496,68 +450,58 @@ object JavaScanners { var lineWhiteSpacePrefix = 0 var lineIsOnlyWhitespace = true val in = LookaheadScanner() - while (!blockClosed && (isUnicodeEscape || ch != SU)) { - if (in.ch == '\"') { // Potential end of the block + while (!blockClosed && (isUnicodeEscape || ch != SU)) + if (in.ch == '\"') // Potential end of the block in.nextChar() - if (in.ch == '\"') { + if (in.ch == '\"') in.nextChar() - if (in.ch == '\"') { + if (in.ch == '\"') blockClosed = true commonWhiteSpacePrefix = commonWhiteSpacePrefix min lineWhiteSpacePrefix blockEndOffset = in.charOffset - 2 - } - } // Not the end of the block - just a single or double " character - if (!blockClosed) { + if (!blockClosed) lineIsOnlyWhitespace = false - } - } else if (in.ch == CR || in.ch == LF) { // new line in the block + else if (in.ch == CR || in.ch == LF) // new line in the block in.nextChar() - if 
(!lineIsOnlyWhitespace) { + if (!lineIsOnlyWhitespace) commonWhiteSpacePrefix = commonWhiteSpacePrefix min lineWhiteSpacePrefix - } lineWhiteSpacePrefix = 0 lineIsOnlyWhitespace = true - } else if (lineIsOnlyWhitespace && Character.isWhitespace(in.ch)) { // extend white space prefix + else if (lineIsOnlyWhitespace && Character.isWhitespace(in.ch)) // extend white space prefix in.nextChar() lineWhiteSpacePrefix += 1 - } else { + else lineIsOnlyWhitespace = false in.getlitch(scanOnly = true, inTextBlock = true) - } - } // Bail out if the block never did have an end - if (!blockClosed) { + if (!blockClosed) error(em"unclosed text block") return - } // Second pass: construct the literal string value this time - while (charOffset < blockEndOffset) { + while (charOffset < blockEndOffset) // Drop the line's leading whitespace var remainingPrefix = commonWhiteSpacePrefix - while (remainingPrefix > 0 && ch != CR && ch != LF && charOffset < blockEndOffset) { + while (remainingPrefix > 0 && ch != CR && ch != LF && charOffset < blockEndOffset) nextChar() remainingPrefix -= 1 - } var trailingWhitespaceLength = 0 var escapedNewline = false // Does the line end with `\`? 
- while (ch != CR && ch != LF && charOffset < blockEndOffset && !escapedNewline) { - if (Character.isWhitespace(ch)) { + while (ch != CR && ch != LF && charOffset < blockEndOffset && !escapedNewline) + if (Character.isWhitespace(ch)) trailingWhitespaceLength += 1 - } else { + else trailingWhitespaceLength = 0 - } // Detect if the line is about to end with `\` if ch == '\\' && cond(lookaheadChar()) { case CR | LF => true } then escapedNewline = true getlitch(scanOnly = false, inTextBlock = true) - } // Remove the last N characters from the buffer */ def popNChars(n: Int): Unit = @@ -571,11 +515,9 @@ object JavaScanners { popNChars(trailingWhitespaceLength) // Normalize line terminators - if ((ch == CR || ch == LF) && !escapedNewline) { + if ((ch == CR || ch == LF) && !escapedNewline) nextChar() putChar('\n') - } - } token = STRINGLIT setStrVal() @@ -584,138 +526,117 @@ object JavaScanners { nextChar() nextChar() nextChar() - } end getTextBlock /** read fractional part and exponent of floating point number * if one is present. 
*/ - protected def getFraction(): Unit = { + protected def getFraction(): Unit = token = DOUBLELIT - while ('0' <= ch && ch <= '9') { + while ('0' <= ch && ch <= '9') putChar(ch) nextChar() - } - if (ch == 'e' || ch == 'E') { + if (ch == 'e' || ch == 'E') val lookahead = lookaheadReader() lookahead.nextChar() if (lookahead.ch == '+' || lookahead.ch == '-') lookahead.nextChar() - if ('0' <= lookahead.ch && lookahead.ch <= '9') { + if ('0' <= lookahead.ch && lookahead.ch <= '9') putChar(ch) nextChar() - if (ch == '+' || ch == '-') { + if (ch == '+' || ch == '-') putChar(ch) nextChar() - } - while ('0' <= ch && ch <= '9') { + while ('0' <= ch && ch <= '9') putChar(ch) nextChar() - } - } token = DOUBLELIT - } - if (ch == 'd' || ch == 'D') { + if (ch == 'd' || ch == 'D') putChar(ch) nextChar() token = DOUBLELIT - } - else if (ch == 'f' || ch == 'F') { + else if (ch == 'f' || ch == 'F') putChar(ch) nextChar() token = FLOATLIT - } setStrVal() - } /** convert name to long value */ def intVal(negated: Boolean): Long = if (token == CHARLIT && !negated) if (strVal.length > 0) strVal.charAt(0).toLong else 0 - else { + else var value: Long = 0 val divider = if (base == 10) 1 else 2 val limit: Long = if (token == LONGLIT) Long.MaxValue else Int.MaxValue var i = 0 val len = strVal.length - while (i < len) { + while (i < len) val d = digit2int(strVal.charAt(i), base) - if (d < 0) { + if (d < 0) error(em"malformed integer number") return 0 - } if (value < 0 || limit / (base / divider) < value || limit - (d / divider) < value * (base / divider) && - !(negated && limit == value * base - 1 + d)) { + !(negated && limit == value * base - 1 + d)) error(em"integer number too large") return 0 - } value = value * base + d i += 1 - } if (negated) -value else value - } /** convert name, base to double value */ - def floatVal(negated: Boolean): Double = { + def floatVal(negated: Boolean): Double = val limit: Double = if (token == DOUBLELIT) Double.MaxValue else Float.MaxValue - try { + try val 
value: Double = java.lang.Double.valueOf(strVal.toString).nn.doubleValue() if (value > limit) error(em"floating point number too large") if (negated) -value else value - } catch { + catch case _: NumberFormatException => error(em"malformed floating point number") 0.0 - } - } /** read a number into name and set base */ - protected def getNumber(): Unit = { - while (digit2int(ch, if (base < 10) 10 else base) >= 0) { + protected def getNumber(): Unit = + while (digit2int(ch, if (base < 10) 10 else base) >= 0) putChar(ch) nextChar() - } token = INTLIT - if (base <= 10 && ch == '.') { + if (base <= 10 && ch == '.') val lookahead = lookaheadReader() lookahead.nextChar() - lookahead.ch match { + lookahead.ch match case '0' | '1' | '2' | '3' | '4' | '5' | '6' | '7' | '8' | '9' | 'd' | 'D' | 'e' | 'E' | 'f' | 'F' => putChar(ch) nextChar() return getFraction() case _ => - if (!isIdentifierStart(lookahead.ch)) { + if (!isIdentifierStart(lookahead.ch)) putChar(ch) nextChar() return getFraction() - } - } - } if (base <= 10 && (ch == 'e' || ch == 'E' || ch == 'f' || ch == 'F' || ch == 'd' || ch == 'D')) return getFraction() setStrVal() - if (ch == 'l' || ch == 'L') { + if (ch == 'l' || ch == 'L') nextChar() token = LONGLIT - } - } // Errors ----------------------------------------------------------------- - override def toString(): String = token match { + override def toString(): String = token match case IDENTIFIER => s"id($name)" case CHARLIT => s"char($strVal)" case INTLIT => s"int($strVal, $base)" @@ -729,14 +650,11 @@ object JavaScanners { "," case _ => tokenString(token) - } /* Initialization: read first char, then first token */ protected def initialize(): Unit = nextChar() nextToken() initialize() - } private val (lastKeywordStart, kwArray) = buildKeywordArray(keywords) -} diff --git a/compiler/src/dotty/tools/dotc/parsing/JavaTokens.scala b/compiler/src/dotty/tools/dotc/parsing/JavaTokens.scala index 2b7882173e00..15aab3c5e2cd 100644 --- 
a/compiler/src/dotty/tools/dotc/parsing/JavaTokens.scala +++ b/compiler/src/dotty/tools/dotc/parsing/JavaTokens.scala @@ -4,7 +4,7 @@ package parsing import collection.immutable.BitSet -object JavaTokens extends TokensCommon { +object JavaTokens extends TokensCommon: inline val minToken = EMPTY final def maxToken: Int = DOUBLE @@ -92,4 +92,3 @@ object JavaTokens extends TokensCommon { inline val LONG = 186; enter(LONG, "long") inline val FLOAT = 187; enter(FLOAT, "float") inline val DOUBLE = 188; enter(DOUBLE, "double") -} diff --git a/compiler/src/dotty/tools/dotc/parsing/ParserPhase.scala b/compiler/src/dotty/tools/dotc/parsing/ParserPhase.scala index a67bca34cae2..261a6d2bd0be 100644 --- a/compiler/src/dotty/tools/dotc/parsing/ParserPhase.scala +++ b/compiler/src/dotty/tools/dotc/parsing/ParserPhase.scala @@ -9,7 +9,7 @@ import dotty.tools.dotc.util.{ NoSourcePosition, SourcePosition } import dotty.tools.dotc.util.Stats.record import dotty.tools.unsupported -class Parser extends Phase { +class Parser extends Phase: override def phaseName: String = Parser.name override def description: String = Parser.description @@ -22,24 +22,22 @@ class Parser extends Phase { */ private[dotc] var firstXmlPos: SourcePosition = NoSourcePosition - def parse(using Context) = monitor("parser") { + def parse(using Context) = monitor("parser"): val unit = ctx.compilationUnit unit.untpdTree = if (unit.isJava) new JavaParsers.JavaParser(unit.source).parse() - else { + else val p = new Parsers.Parser(unit.source) // p.in.debugTokenStream = true val tree = p.parse() if (p.firstXmlPos.exists && !firstXmlPos.exists) firstXmlPos = p.firstXmlPos tree - } if (Config.checkPositions) unit.untpdTree.checkPos(nonOverlapping = !unit.isJava && !ctx.reporter.hasErrors) - } - override def runOn(units: List[CompilationUnit])(using Context): List[CompilationUnit] = { + override def runOn(units: List[CompilationUnit])(using Context): List[CompilationUnit] = val unitContexts = for unit <- units yield 
report.inform(s"parsing ${unit.source}") @@ -49,12 +47,9 @@ class Parser extends Phase { record("parsedTrees", ast.Trees.ntrees) unitContexts.map(_.compilationUnit) - } def run(using Context): Unit = unsupported("run") -} -object Parser{ +object Parser: val name: String = "parser" val description: String = "scan and parse sources" -} diff --git a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala index d286ad384ac9..24c684c39afd 100644 --- a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala +++ b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala @@ -34,7 +34,7 @@ import config.Feature.{sourceVersion, migrateTo3, globalOnlyImports} import config.SourceVersion._ import config.SourceVersion -object Parsers { +object Parsers: import ast.untpd._ @@ -57,18 +57,16 @@ object Parsers { case Expr, Type, Pattern type StageKind = Int - object StageKind { + object StageKind: val None = 0 val Quoted = 1 val Spliced = 1 << 1 val QuotedPattern = 1 << 2 - } extension (buf: ListBuffer[Tree]) - def +++=(x: Tree) = x match { + def +++=(x: Tree) = x match case x: Thicket => buf ++= x.trees case x => buf += x - } /** The parse starting point depends on whether the source file is self-contained: * if not, the AST will be supplemented. @@ -81,7 +79,7 @@ object Parsers { private val InCond: Region => Region = Scanners.InParens(LPAREN, _) private val InFor : Region => Region = Scanners.InBraces(_) - abstract class ParserCommon(val source: SourceFile)(using Context) { + abstract class ParserCommon(val source: SourceFile)(using Context): val in: ScannerCommon @@ -122,11 +120,10 @@ object Parsers { if (in.token == BACKQUOTED_IDENT) in.offset + 1 else in.offset /** in.offset, except if this is at a new line, in which case `lastOffset` is preferred. 
*/ - def expectedOffset: Int = { + def expectedOffset: Int = val current = in.sourcePos() val last = in.sourcePos(in.lastOffset) if (current.line != last.line) in.lastOffset else in.offset - } /* ------------- ERROR HANDLING ------------------------------------------- */ /** The offset where the last syntax error was reported, or if a skip to a @@ -151,25 +148,21 @@ object Parsers { def unimplementedExpr(using Context): Select = Select(scalaDot(nme.Predef), nme.???) - } - trait OutlineParserCommon extends ParserCommon { + trait OutlineParserCommon extends ParserCommon: def accept(token: Int): Int def skipBracesHook(): Option[Tree] - def skipBraces(): Unit = { + def skipBraces(): Unit = accept(if (in.token == INDENT) INDENT else LBRACE) var openBraces = 1 while (in.token != EOF && openBraces > 0) - skipBracesHook() getOrElse { + skipBracesHook() getOrElse: if (in.token == LBRACE || in.token == INDENT) openBraces += 1 else if (in.token == RBRACE || in.token == OUTDENT) openBraces -= 1 in.nextToken() - } - } - } - class Parser(source: SourceFile, allowRewrite: Boolean = true)(using Context) extends ParserCommon(source) { + class Parser(source: SourceFile, allowRewrite: Boolean = true)(using Context) extends ParserCommon(source): val in: Scanner = new Scanner(source, profile = Profile.current, allowRewrite = allowRewrite) // in.debugTokenStream = true // uncomment to see the token stream of the standard scanner, but not syntax highlighting @@ -177,11 +170,10 @@ object Parsers { /** This is the general parse entry point. 
* Overridden by ScriptParser */ - def parse(): Tree = { + def parse(): Tree = val t = compilationUnit() accept(EOF) t - } /* -------------- TOKEN CLASSES ------------------------------------------- */ @@ -206,7 +198,7 @@ object Parsers { def isModifier: Boolean = modifierTokens.contains(in.token) || in.isSoftModifier def isBindingIntro: Boolean = { - in.token match { + in.token match case USCORE => true case IDENTIFIER | BACKQUOTED_IDENT => in.lookahead.isArrow @@ -215,7 +207,6 @@ object Parsers { lookahead.skipParens() lookahead.isArrow case _ => false - } } && !in.isSoftModifierInModifierPosition def isExprIntro: Boolean = @@ -236,10 +227,9 @@ object Parsers { * in a quoted block '{...' */ def isSplice: Boolean = - in.token == IDENTIFIER && in.name(0) == '$' && { + in.token == IDENTIFIER && in.name(0) == '$' `&&`: if in.name.length == 1 then in.lookahead.token == LBRACE else (staged & StageKind.Quoted) != 0 - } /* ------------- ERROR HANDLING ------------------------------------------- */ @@ -247,12 +237,11 @@ object Parsers { * This is the case if the characters between the preceding end-of-line and offset1 * are a prefix of the characters between the preceding end-of-line and offset2. */ - def isLeqIndented(offset1: Int, offset2: Int): Boolean = { + def isLeqIndented(offset1: Int, offset2: Int): Boolean = def recur(idx1: Int, idx2: Int): Boolean = idx1 == offset1 || idx2 < offset2 && source(idx1) == source(idx2) && recur(idx1 + 1, idx2 + 1) recur(source.startOfLine(offset1), source.startOfLine(offset2)) - } /** Skip on error to next safe point. 
*/ @@ -300,14 +289,13 @@ object Parsers { if in.token == token then nextToken() offset - def accept(name: Name): Int = { + def accept(name: Name): Int = val offset = in.offset if !isIdent(name) then syntaxErrorOrIncomplete(em"`$name` expected") if isIdent(name) then nextToken() offset - } def acceptColon(): Int = val offset = in.offset @@ -366,60 +354,51 @@ object Parsers { def syntaxVersionError(option: String, span: Span) = syntaxError(em"""This construct is not allowed under $option.${rewriteNotice(`3.0-migration`, option)}""", span) - def rewriteToNewSyntax(span: Span = Span(in.offset)): Boolean = { - if (in.newSyntax) { + def rewriteToNewSyntax(span: Span = Span(in.offset)): Boolean = + if (in.newSyntax) if (in.rewrite) return true syntaxVersionError("-new-syntax", span) - } false - } - def rewriteToOldSyntax(span: Span = Span(in.offset)): Boolean = { - if (in.oldSyntax) { + def rewriteToOldSyntax(span: Span = Span(in.offset)): Boolean = + if (in.oldSyntax) if (in.rewrite) return true syntaxVersionError("-old-syntax", span) - } false - } def errorTermTree(start: Offset): Literal = atSpan(start, in.offset, in.offset) { Literal(Constant(null)) } private var inFunReturnType = false - private def fromWithinReturnType[T](body: => T): T = { + private def fromWithinReturnType[T](body: => T): T = val saved = inFunReturnType - try { + try inFunReturnType = true body - } finally inFunReturnType = saved - } /** A flag indicating we are parsing in the annotations of a primary * class constructor */ private var inClassConstrAnnots = false - private def fromWithinClassConstr[T](body: => T): T = { + private def fromWithinClassConstr[T](body: => T): T = val saved = inClassConstrAnnots inClassConstrAnnots = true try body finally inClassConstrAnnots = saved - } private var inEnum = false - private def withinEnum[T](body: => T): T = { + private def withinEnum[T](body: => T): T = val saved = inEnum inEnum = true try body finally inEnum = saved - } private var staged = 
StageKind.None - def withinStaged[T](kind: StageKind)(op: => T): T = { + def withinStaged[T](kind: StageKind)(op: => T): T = val saved = staged staged |= kind try op finally staged = saved - } /* ---------- TREE CONSTRUCTION ------------------------------------------- */ @@ -467,16 +446,15 @@ object Parsers { /** Checks that tuples don't contain a parameter. */ def checkNonParamTuple(t: Tree) = t match - case Tuple(ts) => ts.collectFirst { - case param: ValDef => - syntaxError(em"invalid parameter definition syntax in tuple value", param.span) - } + case Tuple(ts) => ts.collectFirst: + case param: ValDef => + syntaxError(em"invalid parameter definition syntax in tuple value", param.span) case _ => /** Convert (qual)ident to type identifier */ - def convertToTypeId(tree: Tree): Tree = tree match { + def convertToTypeId(tree: Tree): Tree = tree match case id @ Ident(name) => cpy.Ident(id)(name.toTypeName) case id @ Select(qual, name) => @@ -484,7 +462,6 @@ object Parsers { case _ => syntaxError(IdentifierExpected(tree.show), tree.span) tree - } /* --------------- PLACEHOLDERS ------------------------------------------- */ @@ -505,25 +482,22 @@ object Parsers { placeholderParams = savedPlaceholderParams in.languageImportContext = savedLanguageImportContext - def isWildcard(t: Tree): Boolean = t match { + def isWildcard(t: Tree): Boolean = t match case Ident(name1) => placeholderParams.nonEmpty && name1 == placeholderParams.head.name case Typed(t1, _) => isWildcard(t1) case Annotated(t1, _) => isWildcard(t1) case Parens(t1) => isWildcard(t1) case _ => false - } - def isWildcardType(t: Tree): Boolean = t match { + def isWildcardType(t: Tree): Boolean = t match case t: TypeBoundsTree => true case Parens(t1) => isWildcardType(t1) case _ => false - } def rejectWildcardType(t: Tree, fallbackTree: Tree = scalaAny): Tree = - if (isWildcardType(t)) { + if (isWildcardType(t)) syntaxError(UnboundWildcardType(), t.span) fallbackTree - } else t /* -------------- XML 
---------------------------------------------------- */ @@ -533,10 +507,9 @@ object Parsers { * The current position is recorded for later error reporting if it turns out * that we don't have scala-xml on the compilation classpath. */ - lazy val xmlp: xml.MarkupParsers.MarkupParser = { + lazy val xmlp: xml.MarkupParsers.MarkupParser = myFirstXmlPos = source.atSpan(Span(in.offset)) new MarkupParser(this, true) - } /** The position of the first XML literal encountered while parsing, * NoSourcePosition if there were no XML literals. @@ -577,9 +550,8 @@ object Parsers { /** {`,` } */ def commaSeparated[T](part: () => T): List[T] = - in.currentRegion.withCommasExpected { + in.currentRegion.withCommasExpected: commaSeparatedRest(part(), part) - } /** {`,` } * @@ -623,9 +595,9 @@ object Parsers { em"""Line is indented too far to the right, or a `{` is missing before: | |${t.tryToShow}""" - else + else in.spaceTabMismatchMsg(startIndentWidth, nextIndentWidth), - in.next.offset + in.next.offset ) t else body() @@ -678,21 +650,19 @@ object Parsers { /** Is `expr` a tree that lacks a final `else`? Put such trees in `{...}` to make * sure we don't accidentally merge them with a following `else`. */ - def isPartialIf(expr: Tree): Boolean = expr match { + def isPartialIf(expr: Tree): Boolean = expr match case If(_, _, EmptyTree) => true case If(_, _, e) => isPartialIf(e) case _ => false - } /** Is `expr` a (possibly curried) function that has a multi-statement block * as body? Put such trees in `{...}` since we don't enclose statements following * a `=>` in braces. */ - def isBlockFunction[T](expr: T): Boolean = expr match { + def isBlockFunction[T](expr: T): Boolean = expr match case Function(_, body) => isBlockFunction(body) case Block(stats, expr) => stats.nonEmpty || isBlockFunction(expr) case _ => false - } /** Start of first line after in.lastOffset that does not have a comment * at indent width greater than the indent width of the closing brace. 
@@ -705,14 +675,13 @@ object Parsers { then closingOffset(source.nextLine(lineStart)) else lineStart - def needsBraces(t: Any): Boolean = t match { + def needsBraces(t: Any): Boolean = t match case Match(EmptyTree, _) => true case Block(stats, expr) => stats.nonEmpty || needsBraces(expr) case expr: Tree => followsColon || isPartialIf(expr) && in.token == ELSE || isBlockFunction(expr) case _ => true - } // begin indentedToBraces val startOpening = if followsColon then @@ -859,7 +828,7 @@ object Parsers { in.skipToken(res) /** Drop (...) or { ... }, replacing the closing element with `endStr` */ - def dropParensOrBraces(start: Offset, endStr: String): Unit = { + def dropParensOrBraces(start: Offset, endStr: String): Unit = if (testChar(start + 1, Chars.isLineBreakChar)) patch(source, Span(if (testChar(start - 1, ' ')) start - 1 else start, start + 1), "") else @@ -872,27 +841,24 @@ object Parsers { if (closingStartsLine && endStr.isEmpty) elimRegion(in.last.offset) else (in.lastOffset - 1, in.lastOffset) patch(source, Span(startClosing, endClosing), s"$preFill$endStr$postFill") - } /** If all other characters on the same line as `span` are blanks, widen to * the whole line. */ - def widenIfWholeLine(span: Span): Span = { + def widenIfWholeLine(span: Span): Span = val start = skipBlanks(span.start - 1, -1) val end = skipBlanks(span.end, 1) if (testChar(start, Chars.LF) && testChar(end, Chars.LF)) Span(start, end) else span - } /** Drop current token, if it is a `then` or `do`. */ def dropTerminator(): Unit = if in.token == THEN || in.token == DO then var startOffset = in.offset var endOffset = in.lastCharOffset - if (in.isAfterLineEnd) { + if (in.isAfterLineEnd) if (testChar(endOffset, ' ')) endOffset += 1 - } else if (testChar(startOffset - 1, ' ') && !overlapsPatch(source, Span(startOffset - 1, endOffset))) @@ -901,11 +867,10 @@ object Parsers { /** rewrite code with (...) 
around the source code of `t` */ def revertToParens(t: Tree): Unit = - if (t.span.exists) { + if (t.span.exists) patch(source, t.span.startPos, "(") patch(source, t.span.endPos, ")") dropTerminator() - } /* --------- LOOKAHEAD --------------------------------------- */ @@ -915,43 +880,38 @@ object Parsers { * - continue a statement (e.g. `else`, catch`), or * - terminate the current scope? */ - def followedByToken(query: Token): Boolean = { + def followedByToken(query: Token): Boolean = val lookahead = in.LookaheadScanner() var braces = 0 - while (true) { + while (true) val token = lookahead.token - if (braces == 0) { + if (braces == 0) if (token == query) return true if (stopScanTokens.contains(token) || lookahead.isNestedEnd) return false - } else if (token == EOF) return false else if (lookahead.isNestedEnd) braces -= 1 if (lookahead.isNestedStart) braces += 1 lookahead.nextToken() - } false - } /** Is the following sequence the generators of a for-expression enclosed in (...)? */ - def followingIsEnclosedGenerators(): Boolean = { + def followingIsEnclosedGenerators(): Boolean = val lookahead = in.LookaheadScanner() var parens = 1 lookahead.nextToken() - while (parens != 0 && lookahead.token != EOF) { + while (parens != 0 && lookahead.token != EOF) val token = lookahead.token if (token == LPAREN) parens += 1 else if (token == RPAREN) parens -= 1 lookahead.nextToken() - } if (lookahead.token == LARROW) false // it's a pattern else if (lookahead.isIdent) true // it's not a pattern since token cannot be an infix operator else followedByToken(LARROW) // `<-` comes before possible statement starts - } /** Are the next token the "GivenSig" part of a given definition, * i.e. an identifier followed by type and value parameters, followed by `:`? @@ -988,7 +948,7 @@ object Parsers { /** Is current ident a `*`, and is it followed by a `)`, `, )`, `,EOF`? The latter two are not syntactically valid, but we need to include them here for error recovery. 
*/ def followingIsVararg(): Boolean = - in.isIdent(nme.raw.STAR) && { + in.isIdent(nme.raw.STAR) `&&`: val lookahead = in.LookaheadScanner() lookahead.nextToken() lookahead.token == RPAREN @@ -997,7 +957,6 @@ object Parsers { lookahead.nextToken() lookahead.token == RPAREN || lookahead.token == EOF } - } /** When encountering a `:`, is that in the binding of a lambda? * @pre location of the enclosing expression is `InParens`, so there is an open `(`. @@ -1044,26 +1003,21 @@ object Parsers { if (op1.isRightAssocOperatorName == op2LeftAssoc) syntaxError(MixedLeftAndRightAssociativeOps(op1, op2, op2LeftAssoc), offset) - def reduceStack(base: List[OpInfo], top: Tree, prec: Int, leftAssoc: Boolean, op2: Name, isType: Boolean): Tree = { + def reduceStack(base: List[OpInfo], top: Tree, prec: Int, leftAssoc: Boolean, op2: Name, isType: Boolean): Tree = if (opStack != base && precedence(opStack.head.operator.name) == prec) checkAssoc(opStack.head.offset, opStack.head.operator.name, op2, leftAssoc) def recur(top: Tree): Tree = if (opStack == base) top - else { + else val opInfo = opStack.head val opPrec = precedence(opInfo.operator.name) - if (prec < opPrec || leftAssoc && prec == opPrec) { + if (prec < opPrec || leftAssoc && prec == opPrec) opStack = opStack.tail - recur { - atSpan(opInfo.operator.span union opInfo.operand.span union top.span) { + recur: + atSpan(opInfo.operator.span union opInfo.operand.span union top.span): InfixOp(opInfo.operand, opInfo.operator, top) - } - } - } else top - } recur(top) - } /** True if we are seeing a lambda argument after a colon of the form: * : (params) => @@ -1101,9 +1055,8 @@ object Parsers { val topInfo = opStack.head opStack = opStack.tail val od = reduceStack(base, topInfo.operand, 0, true, in.name, isType) - atSpan(startOffset(od), topInfo.offset) { + atSpan(startOffset(od), topInfo.offset): PostfixOp(od, topInfo.operator) - } else recur(operand(location)) else val t = reduceStack(base, top, minPrec, leftAssoc = true, in.name, 
isType) @@ -1143,7 +1096,7 @@ object Parsers { def typeIdent(): Ident = makeIdent(in.token, in.offset, ident().toTypeName) - private def makeIdent(tok: Token, offset: Offset, name: Name) = { + private def makeIdent(tok: Token, offset: Offset, name: Name) = val tree = Ident(name) if (tok == BACKQUOTED_IDENT) tree.pushAttachment(Backquoted, ()) @@ -1151,16 +1104,14 @@ object Parsers { val errorOffset = offset min (in.lastOffset - 1) if (tree.name == nme.ERROR && tree.span == NoSpan) tree.withSpan(Span(errorOffset, errorOffset)) else atSpan(offset)(tree) - } def wildcardIdent(): Ident = atSpan(accept(USCORE)) { Ident(nme.WILDCARD) } /** Accept identifier or match clause acting as a selector on given tree `t` */ def selectorOrMatch(t: Tree): Tree = - atSpan(startOffset(t), in.offset) { + atSpan(startOffset(t), in.offset): if in.token == MATCH then matchClause(t) else Select(t, ident()) - } def selector(t: Tree): Tree = atSpan(startOffset(t), in.offset) { Select(t, ident()) } @@ -1249,21 +1200,20 @@ object Parsers { * @param negOffset The offset of a preceding `-' sign, if any. * If the literal is not negated, negOffset == in.offset. 
*/ - def literal(negOffset: Int = in.offset, inPattern: Boolean = false, inTypeOrSingleton: Boolean = false, inStringInterpolation: Boolean = false): Tree = { - def literalOf(token: Token): Tree = { + def literal(negOffset: Int = in.offset, inPattern: Boolean = false, inTypeOrSingleton: Boolean = false, inStringInterpolation: Boolean = false): Tree = + def literalOf(token: Token): Tree = val isNegated = negOffset < in.offset def digits0 = in.removeNumberSeparators(in.strVal) def digits = if (isNegated) "-" + digits0 else digits0 if !inTypeOrSingleton then - token match { + token match case INTLIT => return Number(digits, NumberKind.Whole(in.base)) case DECILIT => return Number(digits, NumberKind.Decimal) case EXPOLIT => return Number(digits, NumberKind.Floating) case _ => - } import scala.util.FromDigits._ val value = - try token match { + try token match case INTLIT => intFromDigits(digits, in.base) case LONGLIT => longFromDigits(digits, in.base) case FLOATLIT => floatFromDigits(digits) @@ -1276,39 +1226,31 @@ object Parsers { case _ => syntaxErrorOrIncomplete(IllegalLiteral()) null - } - catch { + catch case ex: FromDigitsException => syntaxErrorOrIncomplete(ex.getMessage.toMessage) - } Literal(Constant(value)) - } - if (inStringInterpolation) { - val t = in.token match { + if (inStringInterpolation) + val t = in.token match case STRINGLIT | STRINGPART => val value = in.strVal atSpan(negOffset, negOffset, negOffset + value.length) { Literal(Constant(value)) } case _ => syntaxErrorOrIncomplete(IllegalLiteral()) atSpan(negOffset) { Literal(Constant(null)) } - } nextToken() t - } - else atSpan(negOffset) { + else atSpan(negOffset): if (in.token == QUOTEID) - if ((staged & StageKind.Spliced) != 0 && Chars.isIdentifierStart(in.name(0))) { - val t = atSpan(in.offset + 1) { + if ((staged & StageKind.Spliced) != 0 && Chars.isIdentifierStart(in.name(0))) + val t = atSpan(in.offset + 1): val tok = in.toToken(in.name) - tok match { + tok match case TRUE | FALSE | NULL => 
literalOf(tok) case THIS => This(EmptyTypeIdent) case _ => Ident(in.name) - } - } nextToken() Quote(t, Nil) - } else if !in.featureEnabled(Feature.symbolLiterals) then val name = in.name // capture name (not `in`) in the warning message closure @@ -1324,15 +1266,12 @@ object Parsers { patch(source, Span(in.charOffset - 1), "\")") atSpan(skipToken()) { SymbolLit(in.strVal) } else if (in.token == INTERPOLATIONID) interpolatedString(inPattern) - else { + else val t = literalOf(in.token) nextToken() t - } - } - } - private def interpolatedString(inPattern: Boolean = false): Tree = atSpan(in.offset) { + private def interpolatedString(inPattern: Boolean = false): Tree = atSpan(in.offset): val segmentBuf = new ListBuffer[Tree] val interpolator = in.name val isTripleQuoted = @@ -1346,21 +1285,18 @@ object Parsers { atSpan(in.offset) { if (in.token == IDENTIFIER) termIdent() - else if (in.token == USCORE && inPattern) { + else if (in.token == USCORE && inPattern) nextToken() Ident(nme.WILDCARD) - } - else if (in.token == THIS) { + else if (in.token == THIS) nextToken() This(EmptyTypeIdent) - } else if (in.token == LBRACE) if (inPattern) Block(Nil, inBraces(pattern())) else expr() - else { + else report.error(InterpolatedStringError(), source.atSpan(Span(in.offset))) EmptyTree - } }) var offsetCorrection = if isTripleQuoted then 3 else 1 @@ -1371,7 +1307,6 @@ object Parsers { segmentBuf += literal(inPattern = inPattern, negOffset = in.offset + offsetCorrection, inStringInterpolation = true) InterpolatedString(interpolator, segmentBuf.toList) - } /* ------------- NEW LINES ------------------------------------------------- */ @@ -1478,12 +1413,11 @@ object Parsers { */ def toplevelTyp(): Tree = rejectWildcardType(typ()) - private def getFunction(tree: Tree): Option[Function] = tree match { + private def getFunction(tree: Tree): Option[Function] = tree match case Parens(tree1) => getFunction(tree1) case Block(Nil, tree1) => getFunction(tree1) case t: Function => Some(t) case _ 
=> None - } private def checkFunctionNotErased(f: Function, context: String) = def fail(span: Span) = @@ -1495,9 +1429,8 @@ object Parsers { if hasErasedParam then fail(f.span) // erased parameter in term - val hasErasedMods = f.args.collectFirst { + val hasErasedMods = f.args.collectFirst: case v: ValDef if v.mods.is(Flags.Erased) => v - } hasErasedMods match case Some(param) => fail(param.span) case _ => @@ -1512,9 +1445,8 @@ object Parsers { /** CaptureSet ::= `{` CaptureRef {`,` CaptureRef} `}` -- under captureChecking */ - def captureSet(): List[Tree] = inBraces { + def captureSet(): List[Tree] = inBraces: if in.token == RBRACE then Nil else commaSeparated(captureRef) - } def capturesAndResult(core: () => Tree): Tree = if Feature.ccEnabled && in.token == LBRACE && in.offset == in.lastOffset @@ -1541,7 +1473,7 @@ object Parsers { var erasedArgs: ListBuffer[Boolean] = ListBuffer() def functionRest(params: List[Tree]): Tree = val paramSpan = Span(start, in.lastOffset) - atSpan(start, in.offset) { + atSpan(start, in.offset): var token = in.token var isPure = false if isPureArrow(nme.PUREARROW) then @@ -1582,47 +1514,41 @@ object Parsers { lambdaAbstract(tparams, Function(newParams, newResultType)) else Function(params, resultType) - } var isValParamList = false val t = - if (in.token == LPAREN) { + if (in.token == LPAREN) nextToken() - if (in.token == RPAREN) { + if (in.token == RPAREN) nextToken() functionRest(Nil) - } - else { + else val paramStart = in.offset def addErased() = erasedArgs.addOne(isErasedKw) if isErasedKw then { skipToken(); } addErased() - val ts = in.currentRegion.withCommasExpected { + val ts = in.currentRegion.withCommasExpected: funArgType() match case Ident(name) if name != tpnme.WILDCARD && in.isColon => isValParamList = true - def funParam(start: Offset, mods: Modifiers) = { - atSpan(start) { + def funParam(start: Offset, mods: Modifiers) = + atSpan(start): addErased() typedFunParam(in.offset, ident(), imods) - } - } commaSeparatedRest( 
typedFunParam(paramStart, name.toTermName, imods), () => funParam(in.offset, imods)) case t => - def funParam() = { + def funParam() = addErased() funArgType() - } commaSeparatedRest(t, funParam) - } accept(RPAREN) if isValParamList || in.isArrow || isPureArrow then functionRest(ts) - else { + else val ts1 = ts.mapConserve { t => if isByNameType(t) then syntaxError(ByNameParameterNotSupported(t), t.span) @@ -1636,30 +1562,23 @@ object Parsers { withTypeRest( annotTypeRest( simpleTypeRest(tuple))))) - } - } - } - else if (in.token == LBRACKET) { + else if (in.token == LBRACKET) val start = in.offset val tparams = typeParamClause(ParamOwner.TypeParam) if (in.token == TLARROW) atSpan(start, skipToken())(LambdaTypeTree(tparams, toplevelTyp())) - else if (in.token == ARROW || isPureArrow(nme.PUREARROW)) { + else if (in.token == ARROW || isPureArrow(nme.PUREARROW)) val arrowOffset = skipToken() val body = toplevelTyp() - atSpan(start, arrowOffset) { - getFunction(body) match { + atSpan(start, arrowOffset): + getFunction(body) match case Some(f) => checkFunctionNotErased(f, "poly function") PolyFunction(tparams, body) case None => syntaxError(em"Implementation restriction: polymorphic function types must have a value parameter", arrowOffset) Ident(nme.ERROR.toTypeName) - } - } - } else { accept(TLARROW); typ() } - } else if (in.token == INDENT) enclosed(INDENT, typ()) else infixType() @@ -1679,7 +1598,7 @@ object Parsers { t end typ - private def makeKindProjectorTypeDef(name: TypeName): TypeDef = { + private def makeKindProjectorTypeDef(name: TypeName): TypeDef = val isVarianceAnnotated = name.startsWith("+") || name.startsWith("-") // We remove the variance marker from the name without passing along the specified variance at all // The real variance will be inferred at a later stage but may contradict the variance specified, @@ -1687,38 +1606,33 @@ object Parsers { // we may assume that variance annotations have already been checked by the Scala 2 compiler. 
val unannotatedName = if (isVarianceAnnotated) name.mapLast(_.drop(1)) else name TypeDef(unannotatedName, WildcardTypeBoundsTree()).withFlags(Param) - } /** Replaces kind-projector's `*` in a list of types arguments with synthetic names, * returning the new argument list and the synthetic type definitions. */ - private def replaceKindProjectorPlaceholders(params: List[Tree]): (List[Tree], List[TypeDef]) = { + private def replaceKindProjectorPlaceholders(params: List[Tree]): (List[Tree], List[TypeDef]) = val tparams = new ListBuffer[TypeDef] - def addParam() = { + def addParam() = val name = WildcardParamName.fresh().toTypeName tparams += makeKindProjectorTypeDef(name) Ident(name) - } val uscores = ctx.settings.YkindProjector.value == "underscores" - val newParams = params.mapConserve { + val newParams = params.mapConserve: case param @ Ident(tpnme.raw.STAR | tpnme.raw.MINUS_STAR | tpnme.raw.PLUS_STAR) => addParam() case param @ Ident(tpnme.USCOREkw | tpnme.raw.MINUS_USCORE | tpnme.raw.PLUS_USCORE) if uscores => addParam() case other => other - } (newParams, tparams.toList) - } private def implicitKwPos(start: Int): Span = Span(start, start + nme.IMPLICITkw.asSimpleName.length) /** TypedFunParam ::= id ':' Type */ def typedFunParam(start: Offset, name: TermName, mods: Modifiers = EmptyModifiers): ValDef = - atSpan(start) { + atSpan(start): acceptColon() makeParameter(name, typ(), mods) - } /** FunParamClause ::= ‘(’ TypedFunParam {‘,’ TypedFunParam } ‘)’ */ @@ -1757,7 +1671,7 @@ object Parsers { || !canStartInfixTypeTokens.contains(ahead.token) || ahead.lineOffset > 0 - def refinedTypeRest(t: Tree): Tree = { + def refinedTypeRest(t: Tree): Tree = argumentStart() if in.isNestedStart then refinedTypeRest(atSpan(startOffset(t)) { @@ -1772,7 +1686,6 @@ object Parsers { makeRetaining(t, cs, tpnme.retains) else t - } /** WithType ::= AnnotType {`with' AnnotType} (deprecated) */ @@ -1813,19 +1726,17 @@ object Parsers { */ def splice(isType: Boolean): Tree = val start = 
in.offset - atSpan(in.offset) { + atSpan(in.offset): val inPattern = (staged & StageKind.QuotedPattern) != 0 val expr = - if (in.name.length == 1) { + if (in.name.length == 1) nextToken() val inPattern = (staged & StageKind.QuotedPattern) != 0 withinStaged(StageKind.Spliced)(if (inPattern) inBraces(pattern()) else stagedBlock()) - } - else atSpan(in.offset + 1) { + else atSpan(in.offset + 1): val id = Ident(in.name.drop(1)) nextToken() id - } if isType then val msg = "Type splicing with `$` in quotes not supported anymore" val inPattern = (staged & StageKind.QuotedPattern) != 0 @@ -1838,7 +1749,6 @@ object Parsers { SplicePattern(expr, Nil) else Splice(expr) - } /** SimpleType ::= SimpleLiteral * | ‘?’ SubtypeBounds @@ -1885,11 +1795,10 @@ object Parsers { * | SimpleType1 TypeArgs * | SimpleType1 `#' id */ - def simpleType1() = simpleTypeRest { + def simpleType1() = simpleTypeRest: if in.token == LPAREN then - atSpan(in.offset) { + atSpan(in.offset): makeTupleOrParens(inParens(argTypes(namedOK = false, wildOK = true))) - } else if in.token == LBRACE then atSpan(in.offset) { RefinedTypeTree(EmptyTree, refinement(indentOK = false)) } else if (isSplice) @@ -1905,98 +1814,82 @@ object Parsers { singletonCompletion(selector(t)) else convertToTypeId(t) singletonCompletion(simpleRef()) - } - private def simpleTypeRest(t: Tree): Tree = in.token match { + private def simpleTypeRest(t: Tree): Tree = in.token match case HASH => simpleTypeRest(typeProjection(t)) case LBRACKET => simpleTypeRest(atSpan(startOffset(t)) { - val applied = rejectWildcardType(t) - val args = typeArgs(namedOK = false, wildOK = true) + val applied = rejectWildcardType(t) + val args = typeArgs(namedOK = false, wildOK = true) - if (!ctx.settings.YkindProjector.isDefault) { - def fail(): Tree = { - syntaxError( + if (!ctx.settings.YkindProjector.isDefault) + def fail(): Tree = + syntaxError( em"λ requires a single argument of the form X => ... 
or (X, Y) => ...", Span(startOffset(t), in.lastOffset) - ) - AppliedTypeTree(applied, args) - } - - applied match { - case Ident(tpnme.raw.LAMBDA) => - args match { - case List(Function(params, body)) => - val typeDefs = params.collect { - case param @ Ident(name) => makeKindProjectorTypeDef(name.toTypeName).withSpan(param.span) - } - if (typeDefs.length != params.length) fail() - else LambdaTypeTree(typeDefs, body) - case _ => - fail() - } - case _ => - val (newArgs, tparams) = replaceKindProjectorPlaceholders(args) + ) + AppliedTypeTree(applied, args) + + applied match + case Ident(tpnme.raw.LAMBDA) => + args match + case List(Function(params, body)) => + val typeDefs = params.collect: + case param @ Ident(name) => makeKindProjectorTypeDef(name.toTypeName).withSpan(param.span) + if (typeDefs.length != params.length) fail() + else LambdaTypeTree(typeDefs, body) + case _ => + fail() + case _ => + val (newArgs, tparams) = replaceKindProjectorPlaceholders(args) - lambdaAbstract(tparams, AppliedTypeTree(applied, newArgs)) - } + lambdaAbstract(tparams, AppliedTypeTree(applied, newArgs)) - } else { - AppliedTypeTree(applied, args) - } - }) + else + AppliedTypeTree(applied, args) + }) case _ => - if (!ctx.settings.YkindProjector.isDefault) { - t match { + if (!ctx.settings.YkindProjector.isDefault) + t match case Tuple(params) => val (newParams, tparams) = replaceKindProjectorPlaceholders(params) - if (tparams.isEmpty) { + if (tparams.isEmpty) t - } else { + else LambdaTypeTree(tparams, Tuple(newParams)) - } case _ => t - } - } else { + else t - } - } - private def typeProjection(t: Tree): Tree = { + private def typeProjection(t: Tree): Tree = accept(HASH) val id = typeIdent() atSpan(startOffset(t), startOffset(id)) { Select(t, id.name) } - } /** ArgTypes ::= Type {`,' Type} * | NamedTypeArg {`,' NamedTypeArg} * NamedTypeArg ::= id `=' Type */ - def argTypes(namedOK: Boolean, wildOK: Boolean): List[Tree] = { + def argTypes(namedOK: Boolean, wildOK: Boolean): List[Tree] = 
- def argType() = { + def argType() = val t = typ() if (wildOK) t else rejectWildcardType(t) - } - def namedTypeArg() = { + def namedTypeArg() = val name = ident() accept(EQUALS) NamedArg(name.toTypeName, argType()) - } if (namedOK && in.token == IDENTIFIER) - in.currentRegion.withCommasExpected { - argType() match { + in.currentRegion.withCommasExpected: + argType() match case Ident(name) if in.token == EQUALS => nextToken() commaSeparatedRest(NamedArg(name, argType()), () => namedTypeArg()) case firstArg => commaSeparatedRest(firstArg, () => argType()) - } - } else commaSeparated(() => argType()) - } def paramTypeOf(core: () => Tree): Tree = if in.token == ARROW || isPureArrow(nme.PUREARROW) then @@ -2029,14 +1922,12 @@ object Parsers { /** ParamValueType ::= [`into`] Type [`*'] */ - def paramValueType(): Tree = { + def paramValueType(): Tree = val t = maybeInto(toplevelTyp) - if (isIdent(nme.raw.STAR)) { + if (isIdent(nme.raw.STAR)) nextToken() atSpan(startOffset(t)) { PostfixOp(t, Ident(tpnme.raw.STAR)) } - } else t - } /** TypeArgs ::= `[' Type {`,' Type} `]' * NamedTypeArgs ::= `[' NamedTypeArg {`,' NamedTypeArg} `]' @@ -2062,12 +1953,11 @@ object Parsers { /** TypeParamBounds ::= TypeBounds {`<%' Type} {`:' Type} */ - def typeParamBounds(pname: TypeName): Tree = { + def typeParamBounds(pname: TypeName): Tree = val t = typeBounds() val cbs = contextBounds(pname) if (cbs.isEmpty) t else atSpan((t.span union cbs.head.span).start) { ContextBounds(t, cbs) } - } def contextBounds(pname: TypeName): List[Tree] = if in.isColon then @@ -2114,13 +2004,12 @@ object Parsers { var t: Tree = atSpan(in.offset) { Parens(inParens(exprInParens())) } if in.token != altToken then if toBeContinued(altToken) then - t = inSepRegion(InCond) { + t = inSepRegion(InCond): expr1Rest( postfixExprRest( simpleExprRest(t, Location.ElseWhere), Location.ElseWhere), Location.ElseWhere) - } else if rewriteToNewSyntax(t.span) then dropParensOrBraces(t.span.start, s"${tokenString(altToken)}") @@ 
-2173,7 +2062,7 @@ object Parsers { def subExpr() = subPart(expr) - def expr(location: Location, inStatSeq: Boolean = false): Tree = { + def expr(location: Location, inStatSeq: Boolean = false): Tree = val start = in.offset in.token match case IMPLICIT => @@ -2183,7 +2072,7 @@ object Parsers { val tparams = typeParamClause(ParamOwner.TypeParam) val arrowOffset = accept(ARROW) val body = expr(location) - atSpan(start, arrowOffset) { + atSpan(start, arrowOffset): getFunction(body) match case Some(f) => checkFunctionNotErased(f, "poly function") @@ -2191,7 +2080,6 @@ object Parsers { case None => syntaxError(em"Implementation restriction: polymorphic function literals must have a value parameter", arrowOffset) errorTermTree(arrowOffset) - } case _ => val saved = placeholderParams placeholderParams = Nil @@ -2211,25 +2099,23 @@ object Parsers { else checkNonParamTuple(t) wrapPlaceholders(t) - } def expr1(location: Location = Location.ElseWhere, inStatSeq: Boolean = false): Tree = in.token match case IF => ifExpr(in.offset, If) case WHILE => - atSpan(skipToken()) { + atSpan(skipToken()): val cond = condExpr(DO) newLinesOpt() val body = subExpr() WhileDo(cond, body) - } case DO => report.errorOrMigrationWarning( em"""`do while ` is no longer supported, |use `while ; do ()` instead.${rewriteNotice()}""", in.sourcePos(), from = `3.0`) val start = skipToken() - atSpan(start) { + atSpan(start): val body = expr() if (isStatSep) nextToken() val whileStart = in.offset @@ -2238,18 +2124,16 @@ object Parsers { if migrateTo3 then patch(source, Span(start, start + 2), "while ({") patch(source, Span(whileStart, whileStart + 5), ";") - cond match { + cond match case Parens(_) => patch(source, Span(cond.span.start, cond.span.start + 1), "") patch(source, Span(cond.span.end - 1, cond.span.end), "") case _ => - } patch(source, cond.span.endPos, "}) ()") WhileDo(Block(body, cond), Literal(Constant(()))) - } case TRY => val tryOffset = in.offset - atSpan(skipToken()) { + 
atSpan(skipToken()): val body = expr() val (handler, handlerStart) = if in.token == CATCH then @@ -2260,7 +2144,7 @@ object Parsers { span) else (EmptyTree, -1) - handler match { + handler match case Block(Nil, EmptyTree) => assert(handlerStart != -1) syntaxErrorOrIncomplete( @@ -2268,31 +2152,26 @@ object Parsers { Span(handlerStart, endOffset(handler)) ) case _ => - } val finalizer = - if (in.token == FINALLY) { + if (in.token == FINALLY) nextToken(); val expr = subExpr() if expr.span.exists then expr else Literal(Constant(())) // finally without an expression - } - else { + else if handler.isEmpty then report.warning( EmptyCatchAndFinallyBlock(body), source.atSpan(Span(tryOffset, endOffset(body))) ) EmptyTree - } ParsedTry(body, handler, finalizer) - } case THROW => atSpan(skipToken()) { Throw(expr()) } case RETURN => - atSpan(skipToken()) { + atSpan(skipToken()): Return(if (isExprIntro) expr() else EmptyTree, EmptyTree) - } case FOR => forExpr() case _ => @@ -2318,10 +2197,9 @@ object Parsers { if in.token == EQUALS then t match case Ident(_) | Select(_, _) | Apply(_, _) | PrefixOp(_, _) => - atSpan(startOffset(t), skipToken()) { + atSpan(startOffset(t), skipToken()): val loc = if location.inArgs then location else Location.ElseWhere Assign(t, subPart(() => expr(loc))) - } case _ => t else if in.isColon then @@ -2330,8 +2208,8 @@ object Parsers { else t - def ascription(t: Tree, location: Location): Tree = atSpan(startOffset(t)) { - in.token match { + def ascription(t: Tree, location: Location): Tree = atSpan(startOffset(t)): + in.token match case USCORE if in.lookahead.isIdent(nme.raw.STAR) => val uscoreStart = skipToken() val isVarargSplice = location.inArgs && followingIsVararg() @@ -2355,41 +2233,35 @@ object Parsers { annotations().foldLeft(t)(Annotated) case _ => val tpt = typeDependingOn(location) - if (isWildcard(t) && !location.inPattern) { + if (isWildcard(t) && !location.inPattern) val vd :: rest = placeholderParams: @unchecked placeholderParams = 
cpy.ValDef(vd)(tpt = tpt).withSpan(vd.span.union(tpt.span)) :: rest - } Typed(t, tpt) - } - } /** `if' `(' Expr `)' {nl} Expr [[semi] else Expr] * `if' Expr `then' Expr [[semi] else Expr] */ def ifExpr(start: Offset, mkIf: (Tree, Tree, Tree) => If): If = - atSpan(start, skipToken()) { + atSpan(start, skipToken()): val cond = condExpr(THEN) newLinesOpt() val thenp = subExpr() val elsep = if (in.token == ELSE) { nextToken(); subExpr() } else EmptyTree mkIf(cond, thenp, elsep) - } /** MatchClause ::= `match' `{' CaseClauses `}' */ def matchClause(t: Tree): Match = - atSpan(startOffset(t), skipToken()) { + atSpan(startOffset(t), skipToken()): Match(t, inBracesOrIndented(caseClauses(() => caseClause()))) - } /** `match' `{' TypeCaseClauses `}' */ def matchType(t: Tree): MatchTypeTree = - atSpan(startOffset(t), accept(MATCH)) { + atSpan(startOffset(t), accept(MATCH)): MatchTypeTree(EmptyTree, t, inBracesOrIndented(caseClauses(typeCaseClause))) - } /** FunParams ::= Bindings * | id @@ -2406,40 +2278,35 @@ object Parsers { commaSeparated(() => binding(mods)) finally accept(RPAREN) - else { + else val start = in.offset val name = bindingName() val t = - if ((in.token == COLONop || in.token == COLONfollow) && location == Location.InBlock) { + if ((in.token == COLONop || in.token == COLONfollow) && location == Location.InBlock) report.errorOrMigrationWarning( em"This syntax is no longer supported; parameter needs to be enclosed in (...)${rewriteNotice(`future-migration`)}", source.atSpan(Span(start, in.lastOffset)), from = future) nextToken() val t = infixType() - if (sourceVersion == `future-migration`) { + if (sourceVersion == `future-migration`) patch(source, Span(start), "(") patch(source, Span(in.lastOffset), ")") - } t - } else TypeTree() (atSpan(start) { makeParameter(name, t, mods) }) :: Nil - } /** Binding ::= [`erased`] (id | `_') [`:' Type] */ def binding(mods: Modifiers): Tree = - atSpan(in.offset) { + atSpan(in.offset): val mods1 = if isErasedKw then 
addModifier(mods) else mods makeParameter(bindingName(), typedOpt(), mods1) - } def bindingName(): TermName = - if (in.token == USCORE) { + if (in.token == USCORE) nextToken() WildcardParamName.fresh() - } else ident() /** Expr ::= [‘implicit’] FunParams `=>' Expr @@ -2449,7 +2316,7 @@ object Parsers { closureRest(start, location, funParams(implicitMods, location)) def closureRest(start: Int, location: Location, params: List[Tree]): Tree = - atSpan(start, in.offset) { + atSpan(start, in.offset): if in.token == CTXARROW then if params.isEmpty then syntaxError(em"context function literals require at least one formal parameter", Span(start, in.lastOffset)) @@ -2461,7 +2328,6 @@ object Parsers { else if location == Location.InColonArg && in.token == INDENT then blockExpr() else expr() Function(params, body) - } /** PostfixExpr ::= InfixExpr [id [nl]] * InfixExpr ::= PrefixExpr @@ -2519,9 +2385,9 @@ object Parsers { * Quoted ::= ‘'’ ‘{’ Block ‘}’ * | ‘'’ ‘[’ Type ‘]’ */ - def simpleExpr(location: Location, inStatSeq: Boolean = false): Tree = { + def simpleExpr(location: Location, inStatSeq: Boolean = false): Tree = var canApply = true - val t = in.token match { + val t = in.token match case XMLSTART => xmlLiteral() case IDENTIFIER => @@ -2542,14 +2408,12 @@ object Parsers { canApply = false blockExpr(inStatSeq) case QUOTE => - atSpan(skipToken()) { - withinStaged(StageKind.Quoted | (if (location.inPattern) StageKind.QuotedPattern else 0)) { + atSpan(skipToken()): + withinStaged(StageKind.Quoted | (if (location.inPattern) StageKind.QuotedPattern else 0)): val body = if (in.token == LBRACKET) inBrackets(typ()) else stagedBlock() Quote(body, Nil) - } - } case NEW => canApply = false newExpr() @@ -2567,9 +2431,7 @@ object Parsers { val start = in.lastOffset syntaxErrorOrIncomplete(IllegalStartSimpleExpr(tokenString(in.token)), expectedOffset) errorTermTree(start) - } simpleExprRest(t, location, canApply) - } def simpleExprRest(t: Tree, location: Location, canApply: Boolean 
= true): Tree = if (canApply) argumentStart() @@ -2592,14 +2454,12 @@ object Parsers { if name.is(WildcardParamName) then assert(name == placeholderParams.head.name) placeholderParams = placeholderParams.tail - atSpan(startOffset(id)) { + atSpan(startOffset(id)): makeParameter(name.asTermName, typedOpt(), Modifiers(), isBackquoted = isBackquoted(id)) - } case _ => t else if isColonLambda then - val app = atSpan(startOffset(t), skipToken()) { + val app = atSpan(startOffset(t), skipToken()): Apply(t, expr(Location.InColonArg) :: Nil) - } simpleExprRest(app, location, canApply = true) else t end simpleExprRest @@ -2615,19 +2475,18 @@ object Parsers { if in.isNestedStart then Nil else constrApps(exclude = COMMA) possibleTemplateStart(isNew = true) - parents match { + parents match case parent :: Nil if !in.isNestedStart => reposition(if (parent.isType) ensureApplied(wrapNew(parent)) else parent) case _ => New(reposition(templateBodyOpt(emptyConstructor, parents, Nil))) - } /** ExprsInParens ::= ExprInParens {`,' ExprInParens} * Bindings ::= Binding {`,' Binding} */ def exprsInParensOrBindings(): List[Tree] = if in.token == RPAREN then Nil - else in.currentRegion.withCommasExpected { + else in.currentRegion.withCommasExpected: var isFormalParams = false def exprOrBinding() = if isErasedKw then isFormalParams = true @@ -2637,12 +2496,11 @@ object Parsers { if t.isInstanceOf[ValDef] then isFormalParams = true t commaSeparatedRest(exprOrBinding(), exprOrBinding) - } /** ParArgumentExprs ::= `(' [‘using’] [ExprsInParens] `)' * | `(' [ExprsInParens `,'] PostfixExpr `*' ')' */ - def parArgumentExprs(): (List[Tree], Boolean) = inParens { + def parArgumentExprs(): (List[Tree], Boolean) = inParens: if in.token == RPAREN then (Nil, false) else if isIdent(nme.using) then @@ -2650,7 +2508,6 @@ object Parsers { (commaSeparated(argumentExpr), true) else (commaSeparated(argumentExpr), false) - } /** ArgumentExprs ::= ParArgumentExprs * | [nl] BlockExpr @@ -2669,11 +2526,10 @@ object 
Parsers { /** ArgumentExprss ::= {ArgumentExprs} */ - def argumentExprss(fn: Tree): Tree = { + def argumentExprss(fn: Tree): Tree = argumentStart() if (in.token == LPAREN || in.isNestedStart) argumentExprss(mkApply(fn, argumentExprs())) else fn - } /** ParArgumentExprss ::= {ParArgumentExprs} * @@ -2682,10 +2538,10 @@ object Parsers { * parameter list, i.e. does not start with `( * * ident : ` * Furthermore, `()` is considered a annotation argument only if it comes first. */ - def parArgumentExprss(fn: Tree): Tree = { - def isLegalAnnotArg: Boolean = { + def parArgumentExprss(fn: Tree): Tree = + def isLegalAnnotArg: Boolean = val lookahead = in.LookaheadScanner() - (lookahead.token == LPAREN) && { + (lookahead.token == LPAREN) `&&`: lookahead.nextToken() if (lookahead.token == RPAREN) !fn.isInstanceOf[Trees.Apply[?]] // allow one () as annotation argument @@ -2693,39 +2549,33 @@ object Parsers { lookahead.nextToken() !lookahead.isColon else in.canStartExprTokens.contains(lookahead.token) - } - } if (in.token == LPAREN && (!inClassConstrAnnots || isLegalAnnotArg)) parArgumentExprss( atSpan(startOffset(fn)) { mkApply(fn, parArgumentExprs()) } ) else fn - } /** BlockExpr ::= <<< (CaseClauses | Block) >>> */ - def blockExpr(inStatSeq: Boolean = false): Tree = atSpan(in.offset) { + def blockExpr(inStatSeq: Boolean = false): Tree = atSpan(in.offset): val simplify = in.token == INDENT inDefScopeBraces({ if (in.token == CASE) Match(EmptyTree, caseClauses(() => caseClause())) else block(simplify) }, inStatSeq = inStatSeq) - } /** Block ::= BlockStatSeq * @note Return tree does not have a defined span. 
*/ - def block(simplify: Boolean = false): Tree = { + def block(simplify: Boolean = false): Tree = val stats = blockStatSeq() def isExpr(stat: Tree) = !(stat.isDef || stat.isInstanceOf[Import]) - if (stats.nonEmpty && isExpr(stats.last)) { + if (stats.nonEmpty && isExpr(stats.last)) val inits = stats.init val last = stats.last if (inits.isEmpty && (simplify || last.isInstanceOf[Block])) last else Block(inits, last) - } else Block(stats, EmptyTree) - } /** Guard ::= if PostfixExpr */ @@ -2738,11 +2588,10 @@ object Parsers { def enumerators(): List[Tree] = generator() :: enumeratorsRest() def enumeratorsRest(): List[Tree] = - if (isStatSep) { + if (isStatSep) nextToken() if (in.token == DO || in.token == YIELD || in.token == RBRACE) Nil else enumerator() :: enumeratorsRest() - } else if (in.token == IF) guard() :: enumeratorsRest() else Nil @@ -2754,57 +2603,52 @@ object Parsers { def enumerator(): Tree = if (in.token == IF) guard() else if (in.token == CASE) generator() - else { + else val pat = pattern1() if (in.token == EQUALS) atSpan(startOffset(pat), skipToken()) { GenAlias(pat, subExpr()) } else generatorRest(pat, casePat = false) - } /** Generator ::= [‘case’] Pattern `<-' Expr */ - def generator(): Tree = { + def generator(): Tree = val casePat = if (in.token == CASE) { nextToken(); true } else false generatorRest(pattern1(), casePat) - } def generatorRest(pat: Tree, casePat: Boolean): GenFrom = - atSpan(startOffset(pat), accept(LARROW)) { + atSpan(startOffset(pat), accept(LARROW)): val checkMode = if casePat then GenCheckMode.FilterAlways else if sourceVersion.isAtLeast(`future`) then GenCheckMode.Check else if sourceVersion.isAtLeast(`3.2`) then GenCheckMode.CheckAndFilter else GenCheckMode.FilterNow // filter on source version < 3.2, for backward compat GenFrom(pat, subExpr(), checkMode) - } /** ForExpr ::= ‘for’ ‘(’ Enumerators ‘)’ {nl} [‘do‘ | ‘yield’] Expr * | ‘for’ ‘{’ Enumerators ‘}’ {nl} [‘do‘ | ‘yield’] Expr * | ‘for’ Enumerators (‘do‘ | ‘yield’) 
Expr */ def forExpr(): Tree = - atSpan(skipToken()) { + atSpan(skipToken()): var wrappedEnums = true val start = in.offset val forEnd = in.lastOffset val leading = in.token val enums = - if (leading == LBRACE || leading == LPAREN && followingIsEnclosedGenerators()) { + if (leading == LBRACE || leading == LPAREN && followingIsEnclosedGenerators()) nextToken() val res = if (leading == LBRACE || in.token == CASE) enumerators() - else { + else val pats = patternsOpt() val pat = - if (in.token == RPAREN || pats.length > 1) { + if (in.token == RPAREN || pats.length > 1) wrappedEnums = false accept(RPAREN) atSpan(start) { makeTupleOrParens(pats) } // note: alternatives `|' need to be weeded out by typer. - } else pats.head generatorRest(pat, casePat = false) :: enumeratorsRest() - } - if (wrappedEnums) { + if (wrappedEnums) val closingOnNewLine = in.isAfterLineEnd accept(leading + 1) def hasMultiLineEnum = @@ -2816,63 +2660,51 @@ object Parsers { // Don't rewrite if that could change meaning of newlines newLinesOpt() dropParensOrBraces(start, if (in.token == YIELD || in.token == DO) "" else "do") - } in.observeIndented() res - } - else { + else wrappedEnums = false if (in.token == INDENT) inBracesOrIndented(enumerators()) - else { + else val ts = inSepRegion(InFor)(enumerators()) if (rewriteToOldSyntax(Span(start)) && ts.nonEmpty) - if (ts.head.sourcePos.startLine != ts.last.sourcePos.startLine) { + if (ts.head.sourcePos.startLine != ts.last.sourcePos.startLine) patch(source, Span(forEnd), " {") patch(source, Span(in.offset), "} ") - } - else { + else patch(source, ts.head.span.startPos, "(") patch(source, ts.last.span.endPos, ")") - } ts - } - } newLinesOpt() - if (in.token == YIELD) { + if (in.token == YIELD) nextToken() ForYield(enums, subExpr()) - } - else if (in.token == DO) { + else if (in.token == DO) if (rewriteToOldSyntax()) dropTerminator() nextToken() ForDo(enums, subExpr()) - } - else { + else if (!wrappedEnums) 
syntaxErrorOrIncomplete(YieldOrDoExpectedInForComprehension()) ForDo(enums, expr()) - } - } /** CaseClauses ::= CaseClause {CaseClause} * TypeCaseClauses ::= TypeCaseClause {TypeCaseClause} */ - def caseClauses(clause: () => CaseDef): List[CaseDef] = { + def caseClauses(clause: () => CaseDef): List[CaseDef] = val buf = new ListBuffer[CaseDef] buf += clause() while (in.token == CASE) buf += clause() buf.toList - } /** CaseClause ::= ‘case’ Pattern [Guard] `=>' Block * ExprCaseClause ::= ‘case’ Pattern [Guard] ‘=>’ Expr */ - def caseClause(exprOnly: Boolean = false): CaseDef = atSpan(in.offset) { - val (pat, grd) = inSepRegion(InCase) { + def caseClause(exprOnly: Boolean = false): CaseDef = atSpan(in.offset): + val (pat, grd) = inSepRegion(InCase): accept(CASE) (pattern(), guard()) - } CaseDef(pat, grd, atSpan(accept(ARROW)) { indentedRegionAfterArrow({ if exprOnly then @@ -2885,28 +2717,24 @@ object Parsers { else block() }, inCaseDef = true) }) - } /** TypeCaseClause ::= ‘case’ (InfixType | ‘_’) ‘=>’ Type [semi] */ - def typeCaseClause(): CaseDef = atSpan(in.offset) { - val pat = inSepRegion(InCase) { + def typeCaseClause(): CaseDef = atSpan(in.offset): + val pat = inSepRegion(InCase): accept(CASE) - in.token match { + in.token match case USCORE if in.lookahead.isArrow => val start = skipToken() Ident(tpnme.WILDCARD).withSpan(Span(start, in.lastOffset, start)) case _ => rejectWildcardType(infixType()) - } - } CaseDef(pat, EmptyTree, atSpan(accept(ARROW)) { val t = indentedRegionAfterArrow(rejectWildcardType(typ()), inCaseDef = true) if in.token == SEMI then nextToken() newLinesOptWhenFollowedBy(CASE) t }) - } /* -------- PATTERNS ------------------------------------------- */ @@ -2965,14 +2793,13 @@ object Parsers { val pattern2: () => Tree = () => pattern3() match case p @ Ident(name) if in.token == AT => val offset = skipToken() - pattern3() match { + pattern3() match case pt @ Bind(nme.WILDCARD, pt1: Typed) if pt.mods.is(Given) => atSpan(startOffset(p), 0) { 
Bind(name, pt1).withMods(pt.mods) } case Typed(Ident(nme.WILDCARD), pt @ Ident(tpnme.WILDCARD_STAR)) => atSpan(startOffset(p), 0) { Typed(p, pt) } case pt => atSpan(startOffset(p), 0) { Bind(name, pt) } - } case p => p @@ -3000,7 +2827,7 @@ object Parsers { * PatVar ::= id * | `_' */ - def simplePattern(): Tree = in.token match { + def simplePattern(): Tree = in.token match case IDENTIFIER | BACKQUOTED_IDENT | THIS | SUPER => simpleRef() match case id @ Ident(nme.raw.MINUS) if isNumericLit => literal(startOffset(id)) @@ -3014,19 +2841,16 @@ object Parsers { case XMLSTART => xmlLiteralPattern() case GIVEN => - atSpan(in.offset) { + atSpan(in.offset): val givenMod = atSpan(skipToken())(Mod.Given()) val typed = Typed(Ident(nme.WILDCARD), refinedType()) Bind(nme.WILDCARD, typed).withMods(addMod(Modifiers(), givenMod)) - } case _ => if (isLiteral) literal(inPattern = true) - else { + else val start = in.lastOffset syntaxErrorOrIncomplete(IllegalStartOfSimplePattern(), expectedOffset) errorTermTree(start) - } - } val simplePatternFn: Location => Tree = _ => simplePattern() @@ -3058,7 +2882,7 @@ object Parsers { /* -------- MODIFIERS and ANNOTATIONS ------------------------------------------- */ - private def modOfToken(tok: Int, name: Name): Mod = tok match { + private def modOfToken(tok: Int, name: Name): Mod = tok match case ABSTRACT => Mod.Abstract() case FINAL => Mod.Final() case IMPLICIT => Mod.Implicit() @@ -3069,15 +2893,13 @@ object Parsers { case PROTECTED => Mod.Protected() case SEALED => Mod.Sealed() case IDENTIFIER => - name match { + name match case nme.erased if in.erasedEnabled => Mod.Erased() case nme.inline => Mod.Inline() case nme.opaque => Mod.Opaque() case nme.open => Mod.Open() case nme.transparent => Mod.Transparent() case nme.infix => Mod.Infix() - } - } /** Drop `private' modifier when followed by a qualifier. 
* Contract `abstract' and `override' to ABSOVERRIDE @@ -3090,14 +2912,13 @@ object Parsers { else mods - private def addModifier(mods: Modifiers): Modifiers = { + private def addModifier(mods: Modifiers): Modifiers = val tok = in.token val name = in.name val mod = atSpan(skipToken()) { modOfToken(tok, name) } if (mods.isOneOf(mod.flags)) syntaxError(RepeatedModifier(mod.flags.flagsString)) addMod(mods, mod) - } def addFlag(mods: Modifiers, flag: FlagSet): Modifiers = mods.withAddedFlags(flag, Span(in.offset)) @@ -3110,10 +2931,10 @@ object Parsers { /** AccessQualifier ::= "[" (id | this) "]" */ def accessQualifierOpt(mods: Modifiers): Modifiers = - if (in.token == LBRACKET) { + if (in.token == LBRACKET) if (mods.is(Local) || mods.hasPrivateWithin) syntaxError(DuplicatePrivateProtectedQualifier()) - inBrackets { + inBrackets: if in.token == THIS then if sourceVersion.isAtLeast(future) then deprecationWarning( @@ -3121,8 +2942,6 @@ object Parsers { nextToken() mods | Local else mods.withPrivateWithin(ident().toTypeName) - } - } else mods /** {Annotation} {Modifier} @@ -3135,7 +2954,7 @@ object Parsers { * | opaque * LocalModifier ::= abstract | final | sealed | open | implicit | lazy | erased | inline | transparent */ - def modifiers(allowed: BitSet = modifierTokens, start: Modifiers = Modifiers()): Modifiers = { + def modifiers(allowed: BitSet = modifierTokens, start: Modifiers = Modifiers()): Modifiers = @tailrec def loop(mods: Modifiers): Modifiers = if allowed.contains(in.token) @@ -3146,42 +2965,36 @@ object Parsers { val isAccessMod = accessModifierTokens contains in.token val mods1 = addModifier(mods) loop(if (isAccessMod) accessQualifierOpt(mods1) else mods1) - else if (in.isNewLine && (mods.hasFlags || mods.hasAnnotations)) { + else if (in.isNewLine && (mods.hasFlags || mods.hasAnnotations)) nextToken() loop(mods) - } else mods normalize(loop(start)) - } /** Wrap annotation or constructor in New(...). 
*/ def wrapNew(tpt: Tree): Select = Select(New(tpt), nme.CONSTRUCTOR) /** Adjust start of annotation or constructor to offset of preceding @ or new */ - def adjustStart(start: Offset)(tree: Tree): Tree = { - val tree1 = tree match { + def adjustStart(start: Offset)(tree: Tree): Tree = + val tree1 = tree match case Apply(fn, args) => cpy.Apply(tree)(adjustStart(start)(fn), args) case Select(qual, name) => cpy.Select(tree)(adjustStart(start)(qual), name) case _ => tree - } if (tree1.span.exists && start < tree1.span.start) tree1.withSpan(tree1.span.withStart(start)) else tree1 - } /** Annotation ::= `@' SimpleType1 {ParArgumentExprs} */ def annot(): Tree = - adjustStart(accept(AT)) { + adjustStart(accept(AT)): ensureApplied(parArgumentExprss(wrapNew(simpleType1()))) - } - def annotations(skipNewLines: Boolean = false): List[Tree] = { + def annotations(skipNewLines: Boolean = false): List[Tree] = if (skipNewLines) newLinesOptWhenFollowedBy(AT) if (in.token == AT) annot() :: annotations(skipNewLines) else Nil - } def annotsAsMods(skipNewLines: Boolean = false): Modifiers = Modifiers() withAnnotations annotations(skipNewLines) @@ -3242,7 +3055,7 @@ object Parsers { * HkTypeParamClause ::= ‘[’ HkTypeParam {‘,’ HkTypeParam} ‘]’ * HkTypeParam ::= {Annotation} [‘+’ | ‘-’] (id [HkTypePamClause] | ‘_’) TypeBounds */ - def typeParamClause(ownerKind: ParamOwner): List[TypeDef] = inBrackets { + def typeParamClause(ownerKind: ParamOwner): List[TypeDef] = inBrackets: def checkVarianceOK(): Boolean = val ok = ownerKind != ParamOwner.Def && ownerKind != ParamOwner.TypeParam @@ -3250,7 +3063,7 @@ object Parsers { nextToken() ok - def typeParam(): TypeDef = { + def typeParam(): TypeDef = val isAbstractOwner = ownerKind == ParamOwner.Type || ownerKind == ParamOwner.TypeParam val start = in.offset var mods = annotsAsMods() | Param @@ -3263,20 +3076,16 @@ object Parsers { mods |= Covariant else if isIdent(nme.raw.MINUS) && checkVarianceOK() then mods |= Contravariant - atSpan(start, 
nameStart) { + atSpan(start, nameStart): val name = - if (isAbstractOwner && in.token == USCORE) { + if (isAbstractOwner && in.token == USCORE) nextToken() WildcardParamName.fresh().toTypeName - } else ident().toTypeName val hkparams = typeParamClauseOpt(ParamOwner.Type) val bounds = if (isAbstractOwner) typeBounds() else typeParamBounds(name) TypeDef(name, lambdaAbstract(hkparams, bounds)).withMods(mods) - } - } commaSeparated(() => typeParam()) - } def typeParamClauseOpt(ownerKind: ParamOwner): List[TypeDef] = if (in.token == LBRACKET) typeParamClause(ownerKind) else Nil @@ -3315,7 +3124,7 @@ object Parsers { prefix: Boolean = false, // clause precedes name of an extension method givenOnly: Boolean = false, // only given parameters allowed firstClause: Boolean = false // clause is the first in regular list of clauses - ): List[ValDef] = { + ): List[ValDef] = var impliedMods: Modifiers = EmptyModifiers def addParamMod(mod: () => Mod) = impliedMods = addMod(impliedMods, atSpan(skipToken()) { mod() }) @@ -3327,12 +3136,12 @@ object Parsers { if isIdent(nme.using) then addParamMod(() => Mod.Given()) - def param(): ValDef = { + def param(): ValDef = val start = in.offset var mods = impliedMods.withAnnotations(annotations()) if isErasedKw then mods = addModifier(mods) - if (ofClass) { + if (ofClass) mods = addFlag(modifiers(start = mods), ParamAccessor) mods = if in.token == VAL then @@ -3346,13 +3155,11 @@ object Parsers { syntaxError(em"`val` or `var` expected") if (firstClause && ofCaseClass) mods else mods | PrivateLocal - } - else { + else if (isIdent(nme.inline) && in.isSoftModifierInParamModifierPosition) mods = addModifier(mods) mods |= Param - } - atSpan(start, nameStart) { + atSpan(start, nameStart): val name = ident() acceptColon() if (in.token == ARROW && ofClass && !mods.is(Local)) @@ -3365,23 +3172,19 @@ object Parsers { if (impliedMods.mods.nonEmpty) impliedMods = impliedMods.withMods(Nil) // keep only flags, so that parameter positions don't overlap 
ValDef(name, tpt, default).withMods(mods) - } - } - def checkVarArgsRules(vparams: List[ValDef]): Unit = vparams match { + def checkVarArgsRules(vparams: List[ValDef]): Unit = vparams match case Nil => case _ :: Nil if !prefix => case vparam :: rest => - vparam.tpt match { + vparam.tpt match case PostfixOp(_, op) if op.name == tpnme.raw.STAR => syntaxError(VarArgsParamMustComeLast(), vparam.tpt.span) case _ => - } checkVarArgsRules(rest) - } // begin termParamClause - inParens { + inParens: if in.token == RPAREN && !prefix && !impliedMods.is(Given) then Nil else val clause = @@ -3402,14 +3205,11 @@ object Parsers { || isIdent && (in.name == nme.inline || in.lookahead.isColon) (mods, isParams) (if isParams then commaSeparated(() => param()) - else contextTypes(ofClass, numLeadParams, impliedMods)) match { + else contextTypes(ofClass, numLeadParams, impliedMods)) match case Nil => Nil case (h :: t) => h.withAddedFlags(firstParamMod.flags) :: t - } checkVarArgsRules(clause) clause - } - } /** ClsTermParamClauses ::= {ClsTermParamClause} [[nl] ‘(’ [‘implicit’] ClsParams ‘)’] * TypelessClauses ::= TypelessClause {TypelessClause} @@ -3453,9 +3253,9 @@ object Parsers { /** Import ::= `import' ImportExpr {‘,’ ImportExpr} * Export ::= `export' ImportExpr {‘,’ ImportExpr} */ - def importOrExportClause(leading: Token, mkTree: ImportConstr): List[Tree] = { + def importOrExportClause(leading: Token, mkTree: ImportConstr): List[Tree] = val offset = accept(leading) - commaSeparated(importExpr(mkTree)) match { + commaSeparated(importExpr(mkTree)) match case t :: rest => // The first import should start at the start offset of the keyword. 
val firstPos = @@ -3463,8 +3263,6 @@ object Parsers { else Span(offset, in.lastOffset) t.withSpan(firstPos) :: rest case nil => nil - } - } def exportClause() = importOrExportClause(EXPORT, Export(_,_)) @@ -3541,14 +3339,13 @@ object Parsers { patch(source, Span(in.offset, in.offset + 2), if testChar(in.offset - 1, ' ') && testChar(in.offset + 2, ' ') then "as" else " as ") - atSpan(startOffset(from), skipToken()) { + atSpan(startOffset(from), skipToken()): val to = if in.token == USCORE then wildcardIdent() else termIdent() ImportSelector(from, if to.name == nme.ERROR then EmptyTree else to) - } else ImportSelector(from) def importSelector(idOK: Boolean): ImportSelector = - atSpan(in.offset) { + atSpan(in.offset): in.token match case USCORE => wildcardSelector() case GIVEN => givenSelector() @@ -3557,7 +3354,6 @@ object Parsers { else if !idOK then syntaxError(em"named imports cannot follow wildcard imports") namedSelector(termIdent()) - } def importSelectors(): List[ImportSelector] = var idOK = true @@ -3610,7 +3406,7 @@ object Parsers { * | type {nl} TypeDcl * EnumCase ::= `case' (id ClassConstr [`extends' ConstrApps]] | ids) */ - def defOrDcl(start: Int, mods: Modifiers): Tree = in.token match { + def defOrDcl(start: Int, mods: Modifiers): Tree = in.token match case VAL => nextToken() patDefOrDcl(start, mods) @@ -3626,7 +3422,6 @@ object Parsers { enumCase(start, mods) case _ => tmplDef(start, mods) - } /** PatDef ::= ids [‘:’ Type] ‘=’ Expr * | Pattern2 [‘:’ Type] ‘=’ Expr @@ -3635,15 +3430,14 @@ object Parsers { * ValDcl ::= id {`,' id} `:' Type * VarDcl ::= id {`,' id} `:' Type */ - def patDefOrDcl(start: Offset, mods: Modifiers): Tree = atSpan(start, nameStart) { + def patDefOrDcl(start: Offset, mods: Modifiers): Tree = atSpan(start, nameStart): val first = pattern2() - var lhs = first match { + var lhs = first match case id: Ident if in.token == COMMA => nextToken() id :: commaSeparated(() => termIdent()) case _ => first :: Nil - } val tpt = typedOpt() val 
rhs = if tpt.isEmpty || in.token == EQUALS then @@ -3661,16 +3455,15 @@ object Parsers { atSpan(rhs0.span) { Ident(nme.WILDCARD) } case rhs0 => rhs0 else EmptyTree - lhs match { + lhs match case IdPattern(id, t) :: Nil if t.isEmpty => val vdef = ValDef(id.name.asTermName, tpt, rhs) if (isBackquoted(id)) vdef.pushAttachment(Backquoted, ()) finalizeDef(vdef, mods, start) case _ => - def isAllIds = lhs.forall { + def isAllIds = lhs.forall: case IdPattern(id, t) => t.isEmpty case _ => false - } val rhs2 = if rhs.isEmpty && !isAllIds then val start = in.lastOffset @@ -3679,8 +3472,6 @@ object Parsers { else rhs PatDef(mods, lhs, tpt, rhs2) - } - } /** DefDef ::= DefSig [‘:’ Type] ‘=’ Expr * | this TypelessClauses [DefImplicitClause] `=' ConstrExpr @@ -3690,7 +3481,7 @@ object Parsers { * if clauseInterleaving is enabled: * DefSig ::= id [DefParamClauses] [DefImplicitClause] */ - def defDefOrDcl(start: Offset, mods: Modifiers, numLeadParams: Int = 0): DefDef = atSpan(start, nameStart) { + def defDefOrDcl(start: Offset, mods: Modifiers, numLeadParams: Int = 0): DefDef = atSpan(start, nameStart): def scala2ProcedureSyntax(resultTypeStr: String) = def toInsert = @@ -3705,23 +3496,20 @@ object Parsers { else false - if (in.token == THIS) { + if (in.token == THIS) nextToken() val vparamss = termParamClauses(numLeadParams = numLeadParams) if (vparamss.isEmpty || vparamss.head.take(1).exists(_.mods.isOneOf(GivenOrImplicit))) - in.token match { + in.token match case LBRACKET => syntaxError(em"no type parameters allowed here") case EOF => incompleteInputError(AuxConstructorNeedsNonImplicitParameter()) case _ => syntaxError(AuxConstructorNeedsNonImplicitParameter(), nameStart) - } if (migrateTo3) newLineOptWhenFollowedBy(LBRACE) - val rhs = { + val rhs = if (!(in.token == LBRACE && scala2ProcedureSyntax(""))) accept(EQUALS) atSpan(in.offset) { subPart(constrExpr) } - } makeConstructor(Nil, vparamss, rhs).withMods(mods).setComment(in.getDocComment(start)) - } - else { + else val 
mods1 = addFlag(mods, Method) val ident = termIdent() var name = ident.name.asTermName @@ -3756,84 +3544,72 @@ object Parsers { val ddef = DefDef(name, paramss, tpt, rhs) if (isBackquoted(ident)) ddef.pushAttachment(Backquoted, ()) finalizeDef(ddef, mods1, start) - } - } /** ConstrExpr ::= SelfInvocation * | `{' SelfInvocation {semi BlockStat} `}' */ val constrExpr: () => Tree = () => if in.isNestedStart then - atSpan(in.offset) { - inBracesOrIndented { + atSpan(in.offset): + inBracesOrIndented: val stats = selfInvocation() :: ( if isStatSep then nextToken() blockStatSeq() - else Nil + else Nil ) Block(stats, Literal(Constant(()))) - } - } else Block(selfInvocation() :: Nil, Literal(Constant(()))) /** SelfInvocation ::= this ArgumentExprs {ArgumentExprs} */ def selfInvocation(): Tree = - atSpan(accept(THIS)) { + atSpan(accept(THIS)): argumentStart() argumentExprss(mkApply(Ident(nme.CONSTRUCTOR), argumentExprs())) - } /** TypeDcl ::= id [TypeParamClause] {FunParamClause} TypeBounds [‘=’ Type] */ - def typeDefOrDcl(start: Offset, mods: Modifiers): Tree = { + def typeDefOrDcl(start: Offset, mods: Modifiers): Tree = newLinesOpt() - atSpan(start, nameStart) { + atSpan(start, nameStart): val nameIdent = typeIdent() val tparams = typeParamClauseOpt(ParamOwner.Type) val vparamss = funParamClauses() - def makeTypeDef(rhs: Tree): Tree = { + def makeTypeDef(rhs: Tree): Tree = val rhs1 = lambdaAbstractAll(tparams :: vparamss, rhs) val tdef = TypeDef(nameIdent.name.toTypeName, rhs1) if (nameIdent.isBackquoted) tdef.pushAttachment(Backquoted, ()) finalizeDef(tdef, mods, start) - } - in.token match { + in.token match case EQUALS => nextToken() makeTypeDef(toplevelTyp()) case SUBTYPE | SUPERTYPE => val bounds = typeBounds() - if (in.token == EQUALS) { + if (in.token == EQUALS) val eqOffset = skipToken() var rhs = toplevelTyp() - rhs match { + rhs match case mtt: MatchTypeTree => - bounds match { + bounds match case TypeBoundsTree(EmptyTree, upper, _) => rhs = MatchTypeTree(upper, 
mtt.selector, mtt.cases) case _ => syntaxError(em"cannot combine lower bound and match type alias", eqOffset) - } case _ => if mods.is(Opaque) then rhs = TypeBoundsTree(bounds.lo, bounds.hi, rhs) else syntaxError(em"cannot combine bound and alias", eqOffset) - } makeTypeDef(rhs) - } else makeTypeDef(bounds) case SEMI | NEWLINE | NEWLINES | COMMA | RBRACE | OUTDENT | EOF => makeTypeDef(typeBounds()) case _ => syntaxErrorOrIncomplete(ExpectedTypeBoundOrEquals(in.token)) return EmptyTree // return to avoid setting the span to EmptyTree - } - } - } /** TmplDef ::= ([‘case’] ‘class’ | ‘trait’) ClassDef * | [‘case’] ‘object’ ObjectDef @@ -3841,7 +3617,7 @@ object Parsers { * | ‘given’ GivenDef */ def tmplDef(start: Int, mods: Modifiers): Tree = - in.token match { + in.token match case TRAIT => classDef(start, skipToken(addFlag(mods, Trait))) case CLASS => @@ -3859,18 +3635,15 @@ object Parsers { case _ => val start = in.lastOffset syntaxErrorOrIncomplete(ExpectedStartOfTopLevelDefinition()) - mods.annotations match { + mods.annotations match case head :: Nil => head case Nil => EmptyTree case all => Block(all, errorTermTree(start)) - } - } /** ClassDef ::= id ClassConstr TemplateOpt */ - def classDef(start: Offset, mods: Modifiers): TypeDef = atSpan(start, nameStart) { + def classDef(start: Offset, mods: Modifiers): TypeDef = atSpan(start, nameStart): classDefRest(start, mods, ident().toTypeName) - } def classDefRest(start: Offset, mods: Modifiers, name: TypeName): TypeDef = val constr = classConstr(isCaseClass = mods.is(Case)) @@ -3879,12 +3652,11 @@ object Parsers { /** ClassConstr ::= [ClsTypeParamClause] [ConstrMods] ClsTermParamClauses */ - def classConstr(isCaseClass: Boolean = false): DefDef = atSpan(in.lastOffset) { + def classConstr(isCaseClass: Boolean = false): DefDef = atSpan(in.lastOffset): val tparams = typeParamClauseOpt(ParamOwner.Class) val cmods = fromWithinClassConstr(constrModsOpt()) val vparamss = termParamClauses(ofClass = true, ofCaseClass = 
isCaseClass) makeConstructor(tparams, vparamss).withMods(cmods) - } /** ConstrMods ::= {Annotation} [AccessModifier] */ @@ -3893,11 +3665,10 @@ object Parsers { /** ObjectDef ::= id TemplateOpt */ - def objectDef(start: Offset, mods: Modifiers): ModuleDef = atSpan(start, nameStart) { + def objectDef(start: Offset, mods: Modifiers): ModuleDef = atSpan(start, nameStart): val name = ident() val templ = templateOpt(emptyConstructor) finalizeDef(ModuleDef(name, templ), mods, start) - } private def checkAccessOnly(mods: Modifiers, where: String): Modifiers = val mods1 = mods & (AccessFlags | Enum) @@ -3907,55 +3678,47 @@ object Parsers { /** EnumDef ::= id ClassConstr InheritClauses EnumBody */ - def enumDef(start: Offset, mods: Modifiers): TypeDef = atSpan(start, nameStart) { + def enumDef(start: Offset, mods: Modifiers): TypeDef = atSpan(start, nameStart): val mods1 = checkAccessOnly(mods, "definitions") val modulName = ident() val clsName = modulName.toTypeName val constr = classConstr() val templ = template(constr, isEnum = true) finalizeDef(TypeDef(clsName, templ), mods1, start) - } /** EnumCase = `case' (id ClassConstr [`extends' ConstrApps] | ids) */ - def enumCase(start: Offset, mods: Modifiers): DefTree = { + def enumCase(start: Offset, mods: Modifiers): DefTree = val mods1 = checkAccessOnly(mods, "cases") | EnumCase accept(CASE) - atSpan(start, nameStart) { + atSpan(start, nameStart): val id = termIdent() - if (in.token == COMMA) { + if (in.token == COMMA) nextToken() val ids = commaSeparated(() => termIdent()) PatDef(mods1, id :: ids, TypeTree(), EmptyTree) - } - else { + else val caseDef = - if (in.token == LBRACKET || in.token == LPAREN || in.token == AT || isModifier) { + if (in.token == LBRACKET || in.token == LPAREN || in.token == AT || isModifier) val clsName = id.name.toTypeName val constr = classConstr(isCaseClass = true) TypeDef(clsName, caseTemplate(constr)) - } else ModuleDef(id.name.toTermName, caseTemplate(emptyConstructor)) finalizeDef(caseDef, 
mods1, start) - } - } - } /** [`extends' ConstrApps] */ - def caseTemplate(constr: DefDef): Template = { + def caseTemplate(constr: DefDef): Template = val parents = - if (in.token == EXTENDS) { + if (in.token == EXTENDS) nextToken() constrApps() - } else Nil Template(constr, parents, Nil, EmptyValDef, Nil) - } def checkExtensionMethod(tparams: List[Tree], - vparamss: List[List[Tree]], stat: Tree): Unit = stat match { + vparamss: List[List[Tree]], stat: Tree): Unit = stat match case stat: DefDef => if stat.mods.is(ExtensionMethod) && vparamss.nonEmpty then syntaxError(em"no extension method allowed here since leading parameter was already given", stat.span) @@ -3969,12 +3732,11 @@ object Parsers { case EmptyTree => case stat => syntaxError(em"extension clause can only define methods", stat.span) - } /** GivenDef ::= [GivenSig] (AnnotType [‘=’ Expr] | StructuralInstance) * GivenSig ::= [id] [DefTypeParamClause] {UsingParamClauses} ‘:’ */ - def givenDef(start: Offset, mods: Modifiers, givenMod: Mod) = atSpan(start, nameStart) { + def givenDef(start: Offset, mods: Modifiers, givenMod: Mod) = atSpan(start, nameStart): var mods1 = addMod(mods, givenMod) val nameStart = in.offset val name = if isIdent && followingIsGivenSig() then ident() else EmptyTermName @@ -4017,7 +3779,6 @@ object Parsers { else TypeDef(name.toTypeName, templ) end gdef finalizeDef(gdef, mods1, start) - } /** Extension ::= ‘extension’ [DefTypeParamClause] {UsingParamClause} ‘(’ DefTermParam ‘)’ * {UsingParamClause} ExtMethods @@ -4065,7 +3826,7 @@ object Parsers { /** ExtMethods ::= ExtMethod | [nl] ‘{’ ExtMethod {semi ExtMethod ‘}’ */ - def extMethods(numLeadParams: Int): List[Tree] = checkNoEscapingPlaceholders { + def extMethods(numLeadParams: Int): List[Tree] = checkNoEscapingPlaceholders: val meths = new ListBuffer[Tree] while val start = in.offset @@ -4080,7 +3841,6 @@ object Parsers { do () if meths.isEmpty then syntaxErrorOrIncomplete(em"`def` expected") meths.toList - } /* -------- TEMPLATES 
------------------------------------------- */ @@ -4118,25 +3878,22 @@ object Parsers { /** Template ::= InheritClauses [TemplateBody] * InheritClauses ::= [‘extends’ ConstrApps] [‘derives’ QualId {‘,’ QualId}] */ - def template(constr: DefDef, isEnum: Boolean = false): Template = { + def template(constr: DefDef, isEnum: Boolean = false): Template = val parents = - if (in.token == EXTENDS) { + if (in.token == EXTENDS) nextToken() - if (in.token == LBRACE || in.token == COLONeol) { + if (in.token == LBRACE || in.token == COLONeol) report.errorOrMigrationWarning( em"`extends` must be followed by at least one parent", in.sourcePos(), from = `3.0`) Nil - } else constrApps() - } else Nil newLinesOptWhenFollowedBy(nme.derives) val derived = - if (isIdent(nme.derives)) { + if (isIdent(nme.derives)) nextToken() commaSeparated(() => convertToTypeId(qualId())) - } else Nil possibleTemplateStart() if isEnum then @@ -4144,7 +3901,6 @@ object Parsers { Template(constr, parents, derived, self, stats) else templateBodyOpt(constr, parents, derived) - } /** TemplateOpt = [Template] */ @@ -4190,9 +3946,8 @@ object Parsers { /* -------- STATSEQS ------------------------------------------- */ /** Create a tree representing a packaging */ - def makePackaging(start: Int, pkg: Tree, stats: List[Tree]): PackageDef = pkg match { + def makePackaging(start: Int, pkg: Tree, stats: List[Tree]): PackageDef = pkg match case x: RefTree => atSpan(start, pointOffset(pkg))(PackageDef(x, stats)) - } /** Packaging ::= package QualId [nl] `{' TopStatSeq `}' */ @@ -4211,18 +3966,16 @@ object Parsers { * | Extension * | */ - def topStatSeq(outermost: Boolean = false): List[Tree] = { + def topStatSeq(outermost: Boolean = false): List[Tree] = val stats = new ListBuffer[Tree] while var empty = false - if (in.token == PACKAGE) { + if (in.token == PACKAGE) val start = skipToken() - if (in.token == OBJECT) { + if (in.token == OBJECT) nextToken() stats += objectDef(start, Modifiers(Package)) - } else stats += 
packaging(start) - } else if (in.token == IMPORT) stats ++= importClause(outermost) else if (in.token == EXPORT) @@ -4236,7 +3989,6 @@ object Parsers { statSepOrEnd(stats, noPrevStat = empty, "toplevel definition") do () stats.toList - } /** SelfType ::= id [‘:’ InfixType] ‘=>’ * | ‘this’ ‘:’ InfixType ‘=>’ @@ -4246,7 +3998,7 @@ object Parsers { && (in.lookahead.isColon && followingIsSelfType() || in.lookahead.token == ARROW) then - atSpan(in.offset) { + atSpan(in.offset): val selfName = if in.token == THIS then nextToken() @@ -4265,7 +4017,6 @@ object Parsers { else syntaxError(em"`=>` expected after self type") makeSelfDef(selfName, selfTpt) - } else EmptyValDef /** TemplateStatSeq ::= [SelfType] TemplateStat {semi TemplateStat} @@ -4279,7 +4030,7 @@ object Parsers { * EnumStat ::= TemplateStat * | Annotations Modifiers EnumCase */ - def templateStatSeq(): (ValDef, List[Tree]) = checkNoEscapingPlaceholders { + def templateStatSeq(): (ValDef, List[Tree]) = checkNoEscapingPlaceholders: val stats = new ListBuffer[Tree] val startsAfterLineEnd = in.isAfterLineEnd val self = selfType() @@ -4304,7 +4055,6 @@ object Parsers { indentedRegionAfterArrow(loop) else loop (self, if stats.isEmpty then List(EmptyTree) else stats.toList) - } /** RefineStatSeq ::= RefineStat {semi RefineStat} * RefineStat ::= ‘val’ VarDcl @@ -4312,7 +4062,7 @@ object Parsers { * | ‘type’ {nl} TypeDcl * (in reality we admit Defs and vars and filter them out afterwards in `checkLegal`) */ - def refineStatSeq(): List[Tree] = { + def refineStatSeq(): List[Tree] = val stats = new ListBuffer[Tree] def checkLegal(tree: Tree): List[Tree] = def ok = tree :: Nil @@ -4345,9 +4095,8 @@ object Parsers { statSepOrEnd(stats, noPrevStat = !dclFound, what) do () stats.toList - } - def localDef(start: Int, implicitMods: Modifiers = EmptyModifiers): Tree = { + def localDef(start: Int, implicitMods: Modifiers = EmptyModifiers): Tree = var mods = defAnnotsMods(localModifierTokens) for (imod <- implicitMods.mods) mods 
= addMod(mods, imod) if (mods.is(Final)) @@ -4355,7 +4104,6 @@ object Parsers { tmplDef(start, mods) else defOrDcl(start, mods) - } /** BlockStatSeq ::= { BlockStat semi } [Expr] * BlockStat ::= Import @@ -4365,7 +4113,7 @@ object Parsers { * | Expr1 * | */ - def blockStatSeq(): List[Tree] = checkNoEscapingPlaceholders { + def blockStatSeq(): List[Tree] = checkNoEscapingPlaceholders: val stats = new ListBuffer[Tree] while var empty = false @@ -4384,25 +4132,22 @@ object Parsers { statSepOrEnd(stats, noPrevStat = empty, altEnd = CASE) do () stats.toList - } /** CompilationUnit ::= {package QualId semi} TopStatSeq */ - def compilationUnit(): Tree = checkNoEscapingPlaceholders { - def topstats(): List[Tree] = { + def compilationUnit(): Tree = checkNoEscapingPlaceholders: + def topstats(): List[Tree] = val ts = new ListBuffer[Tree] while (in.token == SEMI) nextToken() val start = in.offset - if (in.token == PACKAGE) { + if (in.token == PACKAGE) nextToken() - if (in.token == OBJECT) { + if (in.token == OBJECT) nextToken() ts += objectDef(start, Modifiers(Package)) - if (in.token != EOF) { + if (in.token != EOF) statSepOrEnd(ts, what = "toplevel definition") ts ++= topStatSeq() - } - } else val pkg = qualId() var continue = false @@ -4418,37 +4163,28 @@ object Parsers { if continue then statSepOrEnd(ts, what = "toplevel definition") ts ++= topStatSeq() - } else ts ++= topStatSeq(outermost = true) ts.toList - } - topstats() match { + topstats() match case List(stat @ PackageDef(_, _)) => stat case Nil => EmptyTree // without this case we'd get package defs without positions case stats => PackageDef(Ident(nme.EMPTY_PACKAGE), stats) - } - } - } /** OutlineParser parses top-level declarations in `source` to find declared classes, ignoring their bodies (which * must only have balanced braces). This is used to map class names to defining sources. 
*/ - class OutlineParser(source: SourceFile)(using Context) extends Parser(source, allowRewrite = false) with OutlineParserCommon { + class OutlineParser(source: SourceFile)(using Context) extends Parser(source, allowRewrite = false) with OutlineParserCommon: def skipBracesHook(): Option[Tree] = if (in.token == XMLSTART) Some(xmlLiteral()) else None - override def blockExpr(inStatSeq: Boolean): Tree = { + override def blockExpr(inStatSeq: Boolean): Tree = skipBraces() EmptyTree - } - override def templateBody(parents: List[Tree], rewriteWithColon: Boolean): (ValDef, List[Thicket]) = { + override def templateBody(parents: List[Tree], rewriteWithColon: Boolean): (ValDef, List[Thicket]) = skipBraces() (EmptyValDef, List(EmptyTree)) - } - } -} diff --git a/compiler/src/dotty/tools/dotc/parsing/Scanners.scala b/compiler/src/dotty/tools/dotc/parsing/Scanners.scala index dc4928439112..4c430cc33901 100644 --- a/compiler/src/dotty/tools/dotc/parsing/Scanners.scala +++ b/compiler/src/dotty/tools/dotc/parsing/Scanners.scala @@ -23,7 +23,7 @@ import reporting.{NoProfile, Profile, Message} import java.util.Objects -object Scanners { +object Scanners: /** Offset into source character array */ type Offset = Int @@ -35,7 +35,7 @@ object Scanners { private val identity: IndentWidth => IndentWidth = Predef.identity - trait TokenData { + trait TokenData: /** the next token */ var token: Token = EMPTY @@ -60,7 +60,7 @@ object Scanners { /** the base of a number */ var base: Int = 0 - def copyFrom(td: TokenData): this.type = { + def copyFrom(td: TokenData): this.type = this.token = td.token this.offset = td.offset this.lastOffset = td.lastOffset @@ -69,7 +69,6 @@ object Scanners { this.strVal = td.strVal this.base = td.base this - } def saveCopy: TokenData = newTokenData.copyFrom(this) @@ -93,11 +92,10 @@ object Scanners { def isArrow = token == ARROW || token == CTXARROW - } def newTokenData: TokenData = new TokenData {} - abstract class ScannerCommon(source: SourceFile)(using 
Context) extends CharArrayReader with TokenData { + abstract class ScannerCommon(source: SourceFile)(using Context) extends CharArrayReader with TokenData: val buf: Array[Char] = source.content def nextToken(): Unit @@ -112,21 +110,19 @@ object Scanners { error(msg.toMessage, off) /** Generate an error at the given offset */ - def error(msg: Message, off: Offset = offset): Unit = { + def error(msg: Message, off: Offset = offset): Unit = errorButContinue(msg, off) token = ERROR errOffset = off - } def errorButContinue(msg: Message, off: Offset = offset): Unit = report.error(msg, sourcePos(off)) /** signal an error where the input ended in the middle of a token */ - def incompleteInputError(msg: Message): Unit = { + def incompleteInputError(msg: Message): Unit = report.incompleteInputError(msg, sourcePos()) token = EOF errOffset = offset - } def sourcePos(off: Offset = offset): SourcePosition = source.atSpan(Span(off)) @@ -173,9 +169,8 @@ object Scanners { def checkNoTrailingSeparator(): Unit = if (!litBuf.isEmpty && isNumberSeparator(litBuf.last)) errorButContinue(em"trailing separator is not allowed", offset + litBuf.length - 1) - } - class Scanner(source: SourceFile, override val startFrom: Offset = 0, profile: Profile = NoProfile, allowRewrite: Boolean = true, allowIndent: Boolean = true)(using Context) extends ScannerCommon(source) { + class Scanner(source: SourceFile, override val startFrom: Offset = 0, profile: Profile = NoProfile, allowRewrite: Boolean = true, allowIndent: Boolean = true)(using Context) extends ScannerCommon(source): val keepComments = !ctx.settings.YdropComments.value /** A switch whether operators at the start of lines can be infix operators */ @@ -197,16 +192,15 @@ object Scanners { || (migrateTo3 && !ctx.settings.indent.value) val indentSyntax = ((if (Config.defaultIndent) !noindentSyntax else ctx.settings.indent.value) - || rewriteNoIndent) + || rewriteNoIndent) && allowIndent - if (rewrite) { + if (rewrite) val s = ctx.settings val 
rewriteTargets = List(s.newSyntax, s.oldSyntax, s.indent, s.noindent) val enabled = rewriteTargets.filter(_.value) if (enabled.length > 1) error(em"illegal combination of -rewrite targets: ${enabled(0).name} and ${enabled(1).name}") - } private var myLanguageImportContext: Context = ctx def languageImportContext = myLanguageImportContext @@ -238,14 +232,12 @@ object Scanners { /** Return a list of all the comment positions */ def commentSpans: List[Span] = commentPosBuf.toList - private def addComment(comment: Comment): Unit = { + private def addComment(comment: Comment): Unit = val lookahead = lookaheadReader() - def nextPos: Int = (lookahead.getc(): @switch) match { + def nextPos: Int = (lookahead.getc(): @switch) match case ' ' | '\t' | CR | LF | FF => nextPos case _ => lookahead.charOffset - 1 - } docstringMap = docstringMap + (nextPos -> comment) - } /** Returns the closest docstring preceding the position supplied */ def getDocComment(pos: Int): Option[Comment] = docstringMap.get(pos) @@ -321,25 +313,22 @@ object Scanners { /** Are we directly in a multiline string interpolation expression? * @pre inStringInterpolation */ - private def inMultiLineInterpolation = currentRegion match { + private def inMultiLineInterpolation = currentRegion match case InString(multiLine, _) => multiLine case _ => false - } /** Are we in a `${ }` block? such that RBRACE exits back into multiline string. 
*/ private def inMultiLineInterpolatedExpression = - currentRegion match { + currentRegion match case InBraces(InString(true, _)) => true case _ => false - } /** read next token and return last offset */ - def skipToken(): Offset = { + def skipToken(): Offset = val off = offset nextToken() off - } def skipToken[T](result: T): T = nextToken() @@ -349,7 +338,7 @@ object Scanners { while !matches(currentRegion) && !currentRegion.isOutermost do currentRegion = currentRegion.enclosing - def adjustSepRegions(lastToken: Token): Unit = (lastToken: @switch) match { + def adjustSepRegions(lastToken: Token): Unit = (lastToken: @switch) match case LPAREN | LBRACKET => currentRegion = InParens(lastToken, currentRegion) case LBRACE => @@ -358,21 +347,18 @@ object Scanners { dropUntil(_.isInstanceOf[InBraces]) if !currentRegion.isOutermost then currentRegion = currentRegion.enclosing case RPAREN | RBRACKET => - currentRegion match { + currentRegion match case InParens(prefix, outer) if prefix + 1 == lastToken => currentRegion = outer case _ => - } case OUTDENT => currentRegion match case r: Indented => currentRegion = r.enclosing case _ => case STRINGLIT => - currentRegion match { + currentRegion match case InString(_, outer) => currentRegion = outer case _ => - } case _ => - } /** Read a token or copy it from `next` tokenData */ private def getNextToken(lastToken: Token): Unit = @@ -404,12 +390,11 @@ object Scanners { print(s"[$show${if isInstanceOf[LookaheadScanner] then "(LA)" else ""}]") /** Insert `token` at assumed `offset` in front of current one. 
*/ - def insert(token: Token, offset: Int) = { + def insert(token: Token, offset: Int) = assert(next.token == EMPTY, next) next.copyFrom(this) this.offset = offset this.token = token - } /** A leading symbolic or backquoted identifier is treated as an infix operator if * - it does not follow a blank line, and @@ -481,18 +466,16 @@ object Scanners { import IndentWidth.{Run, Conc} def recur(idx: Int, ch: Char, n: Int, k: IndentWidth => IndentWidth): IndentWidth = if (idx < 0) k(Run(ch, n)) - else { + else val nextChar = buf(idx) if (nextChar == LF) k(Run(ch, n)) else if (nextChar == ' ' || nextChar == '\t') if (nextChar == ch) recur(idx - 1, ch, n + 1, k) - else { + else val k1: IndentWidth => IndentWidth = if (n == 0) k else iw => k(Conc(iw, Run(ch, n))) recur(idx - 1, nextChar, 1, k1) - } else recur(idx - 1, ' ', 0, identity) - } recur(offset - 1, ' ', 0, identity) end indentWidth @@ -708,14 +691,13 @@ object Scanners { * SEMI + ELSE => ELSE, COLON following id/)/] => COLONfollow * - Insert missing OUTDENTs at EOF */ - def postProcessToken(lastToken: Token, lastName: SimpleName): Unit = { - def fuse(tok: Int) = { + def postProcessToken(lastToken: Token, lastName: SimpleName): Unit = + def fuse(tok: Int) = token = tok offset = prev.offset lastOffset = prev.lastOffset lineOffset = prev.lineOffset - } - (token: @switch) match { + (token: @switch) match case CASE => peekAhead() if (token == CLASS) fuse(CASECLASS) @@ -752,8 +734,6 @@ object Scanners { case EOF => if !source.maybeIncomplete then closeIndented() case _ => - } - } protected def isEndMarker: Boolean = if indentSyntax && isAfterLineEnd then @@ -774,16 +754,14 @@ object Scanners { * A blank line consists only of characters <= ' '. * @pre afterLineEnd(). 
*/ - private def pastBlankLine: Boolean = { + private def pastBlankLine: Boolean = val end = offset def recur(idx: Offset, isBlank: Boolean): Boolean = - idx < end && { + idx < end `&&`: val ch = buf(idx) if (ch == LF || ch == FF) isBlank || recur(idx + 1, true) else recur(idx + 1, isBlank && ch <= ' ') - } recur(lastOffset, false) - } import Character.{isHighSurrogate, isLowSurrogate, isUnicodeIdentifierPart, isUnicodeIdentifierStart, isValidCodePoint, toCodePoint} @@ -794,7 +772,7 @@ object Scanners { // true means supplementary chars were put to buffer. // strict to require low surrogate (if not in string literal). private def isSupplementary(high: Char, test: Int => Boolean, strict: Boolean = true): Boolean = - isHighSurrogate(high) && { + isHighSurrogate(high) `&&`: var res = false val low = lookaheadChar() if isLowSurrogate(low) then @@ -815,24 +793,21 @@ object Scanners { else error(em"illegal character '${toUnicode(high)}' missing low surrogate") res - } private def atSupplementary(ch: Char, f: Int => Boolean): Boolean = - isHighSurrogate(ch) && { + isHighSurrogate(ch) `&&`: val hi = ch val lo = lookaheadChar() - isLowSurrogate(lo) && { + isLowSurrogate(lo) `&&`: val codepoint = toCodePoint(hi, lo) isValidCodePoint(codepoint) && f(codepoint) - } - } /** read next token, filling TokenData fields of Scanner. */ - protected final def fetchToken(): Unit = { + protected final def fetchToken(): Unit = offset = charOffset - 1 lineOffset = if (lastOffset < lineStartOffset) lineStartOffset else -1 name = null - (ch: @switch) match { + (ch: @switch) match case ' ' | '\t' | CR | LF | FF => nextChar() fetchToken() @@ -854,18 +829,16 @@ object Scanners { if (ch == '"' && token == IDENTIFIER) token = INTERPOLATIONID case '<' => // is XMLSTART? - def fetchLT() = { + def fetchLT() = val last = if (charOffset >= 2) buf(charOffset - 2) else ' ' nextChar() - last match { + last match case ' ' | '\t' | '\n' | '{' | '(' | '>' if xml.Utility.isNameStart(ch) || ch == '!' 
|| ch == '?' => token = XMLSTART case _ => // Console.println("found '<', but last is '" + in.last +"'"); // DEBUG putChar('<') getOperatorRest() - } - } fetchLT() case '~' | '!' | '@' | '#' | '%' | '^' | '*' | '+' | '-' | /*'<' | */ @@ -877,21 +850,18 @@ object Scanners { case '/' => if (skipComment()) fetchToken() - else { + else putChar('/') getOperatorRest() - } case '0' => - def fetchLeadingZero(): Unit = { + def fetchLeadingZero(): Unit = nextChar() - ch match { + ch match case 'x' | 'X' => base = 16 ; nextChar() //case 'b' | 'B' => base = 2 ; nextChar() case _ => base = 10 ; putChar('0') - } if (base != 10 && !isNumberSeparator(ch) && digit2int(ch, base) < 0) error(em"invalid literal number") - } fetchLeadingZero() getNumber() case '1' | '2' | '3' | '4' | '5' | '6' | '7' | '8' | '9' => @@ -900,54 +870,44 @@ object Scanners { case '`' => getBackquotedIdent() case '\"' => - def stringPart(multiLine: Boolean) = { + def stringPart(multiLine: Boolean) = getStringPart(multiLine) currentRegion = InString(multiLine, currentRegion) - } def fetchDoubleQuote() = - if (token == INTERPOLATIONID) { + if (token == INTERPOLATIONID) nextRawChar() - if (ch == '\"') { - if (lookaheadChar() == '\"') { + if (ch == '\"') + if (lookaheadChar() == '\"') nextRawChar() nextRawChar() stringPart(multiLine = true) - } - else { + else nextChar() token = STRINGLIT strVal = "" - } - } - else { + else stringPart(multiLine = false) - } - } - else { + else nextChar() - if (ch == '\"') { + if (ch == '\"') nextChar() - if (ch == '\"') { + if (ch == '\"') nextRawChar() getRawStringLit() - } - else { + else token = STRINGLIT strVal = "" - } - } else getStringLit() - } fetchDoubleQuote() case '\'' => - def fetchSingleQuote(): Unit = { + def fetchSingleQuote(): Unit = nextChar() if isIdentifierStart(ch) then charLitOr { getIdentRest(); QUOTEID } else if isOperatorPart(ch) && ch != '\\' then charLitOr { getOperatorRest(); QUOTEID } - else ch match { + else ch match case '{' | '[' | ' ' | '\t' if 
lookaheadChar() != '\'' => token = QUOTE case _ if !isAtEnd && ch != SU && ch != CR && ch != LF => @@ -961,14 +921,11 @@ object Scanners { else error(em"unclosed character literal") case _ => error(em"unclosed character literal") - } - } fetchSingleQuote() case '.' => nextChar() - if ('0' <= ch && ch <= '9') { + if ('0' <= ch && ch <= '9') putChar('.'); getFraction(); setStrVal() - } else token = DOT case ';' => @@ -990,10 +947,9 @@ object Scanners { nextChar(); token = RBRACKET case SU => if (isAtEnd) token = EOF - else { + else error(em"illegal character") nextChar() - } case _ => def fetchOther() = if ch == '\u21D2' then @@ -1020,38 +976,32 @@ object Scanners { error(em"illegal character '${toUnicode(ch)}'") nextChar() fetchOther() - } - } - private def skipComment(): Boolean = { + private def skipComment(): Boolean = def appendToComment(ch: Char) = if (keepComments) commentBuf.append(ch) - def nextChar() = { + def nextChar() = appendToComment(ch) Scanner.this.nextChar() - } - def skipLine(): Unit = { + def skipLine(): Unit = nextChar() if ((ch != CR) && (ch != LF) && (ch != SU)) skipLine() - } @tailrec def skipComment(): Unit = - if (ch == '/') { + if (ch == '/') nextChar() if (ch == '*') nestedComment() skipComment() - } - else if (ch == '*') { + else if (ch == '*') while ({ nextChar() ; ch == '*' }) () if (ch == '/') nextChar() else skipComment() - } else if (ch == SU) incompleteInputError(em"unclosed comment") else { nextChar(); skipComment() } def nestedComment() = { nextChar(); skipComment() } val start = lastCharOffset - def finishComment(): Boolean = { - if (keepComments) { + def finishComment(): Boolean = + if (keepComments) val pos = Span(start, charOffset - 1, start) val comment = Comment(pos, commentBuf.toString) commentBuf.clear() @@ -1062,19 +1012,15 @@ object Scanners { else // "forward" doc comments over normal ones getDocComment(start).foreach(addComment) - } true - } nextChar() if (ch == '/') { skipLine(); finishComment() } else if (ch == '*') 
{ nextChar(); skipComment(); finishComment() } - else { + else // This was not a comment, remove the `/` from the buffer commentBuf.clear() false - } - } // Lookahead --------------------------------------------------------------- @@ -1093,10 +1039,9 @@ object Scanners { reset() next - class LookaheadScanner(val allowIndent: Boolean = false) extends Scanner(source, offset, allowIndent = allowIndent) { + class LookaheadScanner(val allowIndent: Boolean = false) extends Scanner(source, offset, allowIndent = allowIndent): override protected def initialCharBufferSize = 8 override def languageImportContext = Scanner.this.languageImportContext - } /** Skip matching pairs of `(...)` or `[...]` parentheses. * @pre The current token is `(` or `[` @@ -1109,32 +1054,29 @@ object Scanners { nextToken() /** Is the current token in a position where a modifier is allowed? */ - def inModifierPosition(): Boolean = { + def inModifierPosition(): Boolean = val lookahead = LookaheadScanner() while lookahead.nextToken() lookahead.isNewLine || lookahead.isSoftModifier do () modifierFollowers.contains(lookahead.token) - } // Identifiers --------------------------------------------------------------- - private def getBackquotedIdent(): Unit = { + private def getBackquotedIdent(): Unit = nextChar() getLitChars('`') - if (ch == '`') { + if (ch == '`') nextChar() finishNamedToken(BACKQUOTED_IDENT, target = this) if (name.length == 0) error(em"empty quoted identifier") else if (name == nme.WILDCARD) error(em"wildcard invalid as backquoted identifier") - } else error(em"unclosed quoted identifier") - } - @tailrec private def getIdentRest(): Unit = (ch: @switch) match { + @tailrec private def getIdentRest(): Unit = (ch: @switch) match case 'A' | 'B' | 'C' | 'D' | 'E' | 'F' | 'G' | 'H' | 'I' | 'J' | 'K' | 'L' | 'M' | 'N' | 'O' | @@ -1167,9 +1109,8 @@ object Scanners { getIdentRest() else finishNamed() - } - @tailrec private def getOperatorRest(): Unit = (ch: @switch) match { + @tailrec private def 
getOperatorRest(): Unit = (ch: @switch) match case '~' | '!' | '@' | '#' | '%' | '^' | '*' | '+' | '-' | '<' | '>' | '?' | ':' | '=' | '&' | @@ -1183,7 +1124,6 @@ object Scanners { if isSpecial(ch) then { putChar(ch); nextChar(); getOperatorRest() } else if isSupplementary(ch, isSpecial) then getOperatorRest() else finishNamed() - } private def getIdentOrOperatorRest(): Unit = if (isIdentifierPart(ch) || isSupplementary(ch, isIdentifierPart)) getIdentRest() else getOperatorRest() @@ -1208,60 +1148,51 @@ object Scanners { // Literals ----------------------------------------------------------------- - private def getStringLit() = { + private def getStringLit() = getLitChars('"') - if (ch == '"') { + if (ch == '"') setStrVal() nextChar() token = STRINGLIT - } else error(em"unclosed string literal") - } private def getRawStringLit(): Unit = - if (ch == '\"') { + if (ch == '\"') nextRawChar() - if (isTripleQuote()) { + if (isTripleQuote()) setStrVal() token = STRINGLIT - } else getRawStringLit() - } else if (ch == SU) incompleteInputError(em"unclosed multi-line string literal") - else { + else putChar(ch) nextRawChar() getRawStringLit() - } // for interpolated strings @tailrec private def getStringPart(multiLine: Boolean): Unit = if (ch == '"') - if (multiLine) { + if (multiLine) nextRawChar() - if (isTripleQuote()) { + if (isTripleQuote()) setStrVal() token = STRINGLIT - } else getStringPart(multiLine) - } - else { + else nextChar() setStrVal() token = STRINGLIT - } - else if (ch == '\\' && !multiLine) { + else if (ch == '\\' && !multiLine) putChar(ch) nextRawChar() if (ch == '"' || ch == '\\') putChar(ch) nextRawChar() getStringPart(multiLine) - } - else if (ch == '$') { + else if (ch == '$') def getInterpolatedIdentRest(hasSupplement: Boolean): Unit = @tailrec def loopRest(): Unit = if ch != SU && isUnicodeIdentifierPart(ch) then @@ -1286,15 +1217,13 @@ object Scanners { end getInterpolatedIdentRest nextRawChar() - if (ch == '$' || ch == '"') { + if (ch == '$' || ch 
== '"') putChar(ch) nextRawChar() getStringPart(multiLine) - } - else if (ch == '{') { + else if (ch == '{') setStrVal() token = STRINGPART - } else if isUnicodeIdentifierStart(ch) || ch == '_' then getInterpolatedIdentRest(hasSupplement = false) else if atSupplementary(ch, isUnicodeIdentifierStart) then @@ -1303,20 +1232,17 @@ object Scanners { error("invalid string interpolation: `$$`, `$\"`, `$`ident or `$`BlockExpr expected".toMessage, off = charOffset - 2) putChar('$') getStringPart(multiLine) - } - else { + else val isUnclosedLiteral = !isUnicodeEscape && (ch == SU || (!multiLine && (ch == CR || ch == LF))) if (isUnclosedLiteral) if (multiLine) incompleteInputError(em"unclosed multi-line string literal") else error(em"unclosed string literal") - else { + else putChar(ch) nextRawChar() getStringPart(multiLine) - } - } end getStringPart private def fetchStringPart(multiLine: Boolean) = @@ -1325,26 +1251,21 @@ object Scanners { getStringPart(multiLine) private def isTripleQuote(): Boolean = - if (ch == '"') { + if (ch == '"') nextRawChar() - if (ch == '"') { + if (ch == '"') nextChar() - while (ch == '"') { + while (ch == '"') putChar('"') nextChar() - } true - } - else { + else putChar('"') putChar('"') false - } - } - else { + else putChar('"') false - } /** Copy current character into cbuf, interpreting any escape sequences, * and advance to next character. Surrogate pairs are consumed (see check @@ -1421,108 +1342,91 @@ object Scanners { /** read fractional part and exponent of floating point number * if one is present. 
*/ - protected def getFraction(): Unit = { + protected def getFraction(): Unit = token = DECILIT - while ('0' <= ch && ch <= '9' || isNumberSeparator(ch)) { + while ('0' <= ch && ch <= '9' || isNumberSeparator(ch)) putChar(ch) nextChar() - } checkNoTrailingSeparator() - if (ch == 'e' || ch == 'E') { + if (ch == 'e' || ch == 'E') val lookahead = lookaheadReader() lookahead.nextChar() if (lookahead.ch == '+' || lookahead.ch == '-') lookahead.nextChar() - if ('0' <= lookahead.ch && lookahead.ch <= '9' || isNumberSeparator(ch)) { + if ('0' <= lookahead.ch && lookahead.ch <= '9' || isNumberSeparator(ch)) putChar(ch) nextChar() - if (ch == '+' || ch == '-') { + if (ch == '+' || ch == '-') putChar(ch) nextChar() - } - while ('0' <= ch && ch <= '9' || isNumberSeparator(ch)) { + while ('0' <= ch && ch <= '9' || isNumberSeparator(ch)) putChar(ch) nextChar() - } checkNoTrailingSeparator() - } token = EXPOLIT - } - if (ch == 'd' || ch == 'D') { + if (ch == 'd' || ch == 'D') putChar(ch) nextChar() token = DOUBLELIT - } - else if (ch == 'f' || ch == 'F') { + else if (ch == 'f' || ch == 'F') putChar(ch) nextChar() token = FLOATLIT - } checkNoLetter() - } def checkNoLetter(): Unit = if (isIdentifierPart(ch) && ch >= ' ') error(em"Invalid literal number") /** Read a number into strVal and set base */ - protected def getNumber(): Unit = { - while (isNumberSeparator(ch) || digit2int(ch, base) >= 0) { + protected def getNumber(): Unit = + while (isNumberSeparator(ch) || digit2int(ch, base) >= 0) putChar(ch) nextChar() - } checkNoTrailingSeparator() token = INTLIT - if (base == 10 && ch == '.') { + if (base == 10 && ch == '.') val lch = lookaheadChar() - if ('0' <= lch && lch <= '9') { + if ('0' <= lch && lch <= '9') putChar('.') nextChar() getFraction() - } - } - else (ch: @switch) match { + else (ch: @switch) match case 'e' | 'E' | 'f' | 'F' | 'd' | 'D' => if (base == 10) getFraction() case 'l' | 'L' => nextChar() token = LONGLIT case _ => - } checkNoTrailingSeparator() setStrVal() - 
} - private def finishCharLit(): Unit = { + private def finishCharLit(): Unit = nextChar() token = CHARLIT setStrVal() - } /** Parse character literal if current character is followed by \', * or follow with given op and return a symbol literal token */ - def charLitOr(op: => Token): Unit = { + def charLitOr(op: => Token): Unit = putChar(ch) nextChar() if (ch == '\'') finishCharLit() - else { + else token = op strVal = Objects.toString(name) litBuf.clear() - } - } override def toString: String = - showTokenDetailed(token) + { + showTokenDetailed(token) `+`: if identifierTokens.contains(token) then s" $name" else if literalTokens.contains(token) then s" $strVal" else "" - } - def show: String = token match { + def show: String = token match case IDENTIFIER | BACKQUOTED_IDENT => s"id($name)" case CHARLIT => s"char($strVal)" case INTLIT => s"int($strVal, base = $base)" @@ -1539,22 +1443,19 @@ object Scanners { case COLONfollow | COLONeol => "':'" case _ => if debugTokenStream then showTokenDetailed(token) else showToken(token) - } /* Resume normal scanning after XML */ - def resume(lastTokenData: TokenData): Unit = { + def resume(lastTokenData: TokenData): Unit = this.copyFrom(lastTokenData) if (next.token != EMPTY && !ctx.reporter.hasErrors) error(em"unexpected end of input: possible missing '}' in XML block") nextToken() - } /* Initialization: read first char, then first token */ nextChar() nextToken() currentRegion = topLevelRegion(indentWidth(offset)) - } end Scanner /** A Region indicates what encloses the current token. 
It can be one of the following @@ -1660,7 +1561,7 @@ object Scanners { def topLevelRegion(width: IndentWidth) = Indented(width, EMPTY, null) - enum IndentWidth { + enum IndentWidth: case Run(ch: Char, n: Int) case Conc(l: IndentWidth, r: Run) @@ -1700,25 +1601,20 @@ object Scanners { case Conc(l2, r2) => l1 == l2 && r1.isClose(r2) case _ => false - def toPrefix: String = this match { + def toPrefix: String = this match case Run(ch, n) => ch.toString * n case Conc(l, r) => l.toPrefix ++ r.toPrefix - } - override def toString: String = { - def kind(ch: Char) = ch match { + override def toString: String = + def kind(ch: Char) = ch match case ' ' => "space" case '\t' => "tab" case _ => s"'$ch'-character" - } - this match { + this match case Run(ch, n) => s"$n ${kind(ch)}${if (n == 1) "" else "s"}" case Conc(l, r) => s"$l, $r" - } - } - } - object IndentWidth { + object IndentWidth: private inline val MaxCached = 40 private val spaces = Array.tabulate(MaxCached + 1)(new Run(' ', _)) private val tabs = Array.tabulate(MaxCached + 1)(new Run('\t', _)) @@ -1729,9 +1625,7 @@ object Scanners { else new Run(ch, n) val Zero = Run(' ', 0) - } // ------------- keyword configuration ----------------------------------- private val (lastKeywordStart, kwArray) = buildKeywordArray(keywords) -} diff --git a/compiler/src/dotty/tools/dotc/parsing/ScriptParsers.scala b/compiler/src/dotty/tools/dotc/parsing/ScriptParsers.scala index d11db73b0455..8eb49387c7af 100644 --- a/compiler/src/dotty/tools/dotc/parsing/ScriptParsers.scala +++ b/compiler/src/dotty/tools/dotc/parsing/ScriptParsers.scala @@ -43,11 +43,11 @@ import Parsers._ * * */ -object ScriptParsers { +object ScriptParsers: import ast.untpd._ - class ScriptParser(source: SourceFile)(using Context) extends Parser(source) { + class ScriptParser(source: SourceFile)(using Context) extends Parser(source): /** This is the parse entry point for code which is not self-contained, e.g. * a script which is a series of template statements. 
They will be @@ -55,8 +55,6 @@ object ScriptParsers { * by compilationUnit(). */ override def parse(): Tree = unsupported("parse") - } -} /* TODO: reinstantiate diff --git a/compiler/src/dotty/tools/dotc/parsing/Tokens.scala b/compiler/src/dotty/tools/dotc/parsing/Tokens.scala index dba0ad3fa2ee..eb9ace169e16 100644 --- a/compiler/src/dotty/tools/dotc/parsing/Tokens.scala +++ b/compiler/src/dotty/tools/dotc/parsing/Tokens.scala @@ -8,7 +8,7 @@ import collection.immutable.BitSet import core.Decorators._ import core.StdNames.nme -abstract class TokensCommon { +abstract class TokensCommon: def maxToken: Int type Token = Int @@ -18,11 +18,10 @@ abstract class TokensCommon { val tokenString, debugString: Array[String] = new Array[String](maxToken + 1) - def enter(token: Int, str: String, debugStr: String = ""): Unit = { + def enter(token: Int, str: String, debugStr: String = ""): Unit = assert(tokenString(token) == null) tokenString(token) = str debugString(token) = if (debugStr.isEmpty) str else debugStr - } /** special tokens */ inline val EMPTY = 0; enter(EMPTY, "") // a missing token, used in lookahead @@ -127,7 +126,7 @@ abstract class TokensCommon { inline val firstParen = LPAREN inline val lastParen = OUTDENT - def buildKeywordArray(keywords: TokenSet): (Int, Array[Int]) = { + def buildKeywordArray(keywords: TokenSet): (Int, Array[Int]) = def start(tok: Token) = tokenString(tok).toTermName.asSimpleName.start def sourceKeywords = keywords.toList.filter { (kw: Token) => val ts = tokenString(kw) @@ -139,10 +138,8 @@ abstract class TokensCommon { val arr = Array.fill(lastKeywordStart + 1)(IDENTIFIER) for (kw <- sourceKeywords) arr(start(kw)) = kw (lastKeywordStart, arr) - } -} -object Tokens extends TokensCommon { +object Tokens extends TokensCommon: inline val minToken = EMPTY final def maxToken: Int = XMLSTART @@ -223,8 +220,8 @@ object Tokens extends TokensCommon { final val canStartExprTokens3: TokenSet = atomicExprTokens - | openParensTokens - | BitSet(INDENT, 
QUOTE, IF, WHILE, FOR, NEW, TRY, THROW) + | openParensTokens + | BitSet(INDENT, QUOTE, IF, WHILE, FOR, NEW, TRY, THROW) final val canStartExprTokens2: TokenSet = canStartExprTokens3 | BitSet(DO) @@ -293,8 +290,6 @@ object Tokens extends TokensCommon { def showTokenDetailed(token: Int): String = debugString(token) - def showToken(token: Int): String = { + def showToken(token: Int): String = val str = tokenString(token) if isKeyword(token) || token == COLONfollow || token == COLONeol then s"'$str'" else str - } -} diff --git a/compiler/src/dotty/tools/dotc/parsing/package.scala b/compiler/src/dotty/tools/dotc/parsing/package.scala index ee3ecda60aee..19cd538ef929 100644 --- a/compiler/src/dotty/tools/dotc/parsing/package.scala +++ b/compiler/src/dotty/tools/dotc/parsing/package.scala @@ -5,7 +5,7 @@ import core.Names.Name import core.StdNames.nme import core.NameOps._ -package object parsing { +package object parsing: /** * Compute the precedence of infix operator `operator` according to the SLS [§ 6.12.3][SLS]. @@ -16,11 +16,11 @@ package object parsing { */ def precedence(operator: Name): Int = if (operator eq nme.ERROR) -1 - else { + else val firstCh = operator.firstCodePoint if (isScalaLetter(firstCh)) 1 else if (operator.isOpAssignmentName) 0 - else firstCh match { + else firstCh match case '|' => 2 case '^' => 3 case '&' => 4 @@ -30,11 +30,8 @@ package object parsing { case '+' | '-' => 8 case '*' | '/' | '%' => 9 case _ => 10 - } - } def minPrec: Int = 0 def minInfixPrec: Int = 1 def maxPrec: Int = 11 -} diff --git a/compiler/src/dotty/tools/dotc/parsing/xml/MarkupParserCommon.scala b/compiler/src/dotty/tools/dotc/parsing/xml/MarkupParserCommon.scala index 0f7d426fbd28..310e05abc58f 100644 --- a/compiler/src/dotty/tools/dotc/parsing/xml/MarkupParserCommon.scala +++ b/compiler/src/dotty/tools/dotc/parsing/xml/MarkupParserCommon.scala @@ -17,7 +17,7 @@ import scala.collection.BufferedIterator * between the library level XML parser and the compiler's. 
* All members should be accessed through those. */ -private[dotty] trait MarkupParserCommon { +private[dotty] trait MarkupParserCommon: protected def unreachable: Nothing = scala.sys.error("Cannot be reached.") // type HandleType // MarkupHandler, SymbolicXMLBuilder @@ -34,64 +34,56 @@ private[dotty] trait MarkupParserCommon { * [40] STag ::= '<' Name { S Attribute } [S] * [44] EmptyElemTag ::= '<' Name { S Attribute } [S] */ - protected def xTag(pscope: NamespaceType): (String, AttributesType) = { + protected def xTag(pscope: NamespaceType): (String, AttributesType) = val name = xName xSpaceOpt() (name, mkAttributes(name, pscope)) - } /** '?' {Char})]'?>' * * see [15] */ - def xProcInstr: ElementType = { + def xProcInstr: ElementType = val n = xName xSpaceOpt() xTakeUntil(mkProcInstr(_, n, _), () => tmppos, "?>") - } /** attribute value, terminated by either `'` or `"`. value may not contain `<`. @param endCh either `'` or `"` */ - def xAttributeValue(endCh: Char): String = { + def xAttributeValue(endCh: Char): String = val buf = new StringBuilder - while (ch != endCh) { + while (ch != endCh) // well-formedness constraint if (ch == '<') return errorAndResult("'<' not allowed in attrib value", "") else if (ch == SU) truncatedError("") else buf append ch_returning_nextch - } ch_returning_nextch // @todo: normalize attribute value buf.toString - } - def xAttributeValue(): String = { + def xAttributeValue(): String = val str = xAttributeValue(ch_returning_nextch) // well-formedness constraint normalizeAttributeValue(str) - } - private def takeUntilChar(it: Iterator[Char], end: Char): String = { + private def takeUntilChar(it: Iterator[Char], end: Char): String = val buf = new StringBuilder - while (it.hasNext) it.next() match { + while (it.hasNext) it.next() match case `end` => return buf.toString case ch => buf append ch - } scala.sys.error("Expected '%s'".format(end)) - } /** [42] '<' xmlEndTag ::= '<' '/' Name S? 
'>' */ - def xEndTag(startName: String): Unit = { + def xEndTag(startName: String): Unit = xToken('/') if (xName != startName) errorNoEnd(startName) xSpaceOpt() xToken('>') - } /** actually, Name ::= (Letter | '_' | ':') (NameChar)* but starting with ':' cannot happen * Name ::= (Letter | '_') (NameChar)* @@ -101,7 +93,7 @@ private[dotty] trait MarkupParserCommon { * pre-condition: ch != ':' // assured by definition of XMLSTART token * post-condition: name does neither start, nor end in ':' */ - def xName: String = { + def xName: String = if (ch == SU) truncatedError("") else if (!isNameStart(ch)) @@ -111,14 +103,12 @@ private[dotty] trait MarkupParserCommon { while ({ buf append ch_returning_nextch ; isNameChar(ch) }) () - if (buf.last == ':') { + if (buf.last == ':') reportSyntaxError( "name cannot end in ':'" ) buf.toString dropRight 1 - } else buf.toString - } - private def attr_unescape(s: String) = s match { + private def attr_unescape(s: String) = s match case "lt" => "<" case "gt" => ">" case "amp" => "&" @@ -126,12 +116,11 @@ private[dotty] trait MarkupParserCommon { case "quot" => "\"" case "quote" => "\"" case _ => "&" + s + ";" - } /** Replaces only character references right now. 
* see spec 3.3.3 */ - private def normalizeAttributeValue(attval: String): String = { + private def normalizeAttributeValue(attval: String): String = val buf = new StringBuilder val it = attval.iterator.buffered @@ -143,7 +132,6 @@ private[dotty] trait MarkupParserCommon { }) buf.toString - } /** CharRef ::= "&#" '0'..'9' {'0'..'9'} ";" * | "&#x" '0'..'9'|'A'..'F'|'a'..'f' { hexdigit } ";" @@ -153,10 +141,9 @@ private[dotty] trait MarkupParserCommon { def xCharRef(ch: () => Char, nextch: () => Unit): String = Utility.parseCharRef(ch, nextch, reportSyntaxError _, truncatedError _) - def xCharRef(it: Iterator[Char]): String = { + def xCharRef(it: Iterator[Char]): String = var c = it.next() Utility.parseCharRef(() => c, () => { c = it.next() }, reportSyntaxError _, truncatedError _) - } def xCharRef: String = xCharRef(() => ch, () => nextch()) @@ -185,15 +172,13 @@ private[dotty] trait MarkupParserCommon { def truncatedError(msg: String): Nothing def errorNoEnd(tag: String): Nothing - protected def errorAndResult[T](msg: String, x: T): T = { + protected def errorAndResult[T](msg: String, x: T): T = reportSyntaxError(msg) x - } - def xToken(that: Char): Unit = { + def xToken(that: Char): Unit = if (ch == that) nextch() else xHandleError(that, "'%s' expected instead of '%s'".format(that, ch)) - } def xToken(that: Seq[Char]): Unit = { that foreach xToken } /** scan [S] '=' [S]*/ @@ -211,11 +196,10 @@ private[dotty] trait MarkupParserCommon { def returning[T](x: T)(f: T => Unit): T = { f(x); x } /** Execute body with a variable saved and restored after execution */ - def saving[A, B](getter: A, setter: A => Unit)(body: => B): B = { + def saving[A, B](getter: A, setter: A => Unit)(body: => B): B = val saved = getter try body finally setter(saved) - } /** Take characters from input stream until given String "until" * is seen. 
Once seen, the accumulated characters are passed @@ -225,12 +209,11 @@ private[dotty] trait MarkupParserCommon { handler: (PositionType, String) => T, positioner: () => PositionType, until: String): T = - { val sb = new StringBuilder val head = until.head val rest = until.tail - while (true) { + while (true) if (ch == head && peek(rest)) return handler(positioner(), sb.toString) else if (ch == SU) @@ -238,9 +221,7 @@ private[dotty] trait MarkupParserCommon { sb append ch nextch() - } unreachable - } /** Create a non-destructive lookahead reader and see if the head * of the input would match the given String. If yes, return true @@ -248,9 +229,7 @@ private[dotty] trait MarkupParserCommon { * and leave input unchanged. */ private def peek(lookingFor: String): Boolean = - (lookahead() take lookingFor.length sameElements lookingFor.iterator) && { + (lookahead() take lookingFor.length sameElements lookingFor.iterator) `&&`: // drop the chars from the real reader (all lookahead + orig) (0 to lookingFor.length) foreach (_ => nextch()) true - } -} diff --git a/compiler/src/dotty/tools/dotc/parsing/xml/MarkupParsers.scala b/compiler/src/dotty/tools/dotc/parsing/xml/MarkupParsers.scala index 34a179c1be01..e39e22cb6420 100644 --- a/compiler/src/dotty/tools/dotc/parsing/xml/MarkupParsers.scala +++ b/compiler/src/dotty/tools/dotc/parsing/xml/MarkupParsers.scala @@ -36,23 +36,20 @@ import Utility._ * @author Burak Emir * @version 1.0 */ -object MarkupParsers { +object MarkupParsers: import ast.untpd._ - case object MissingEndTagControl extends ControlThrowable { + case object MissingEndTagControl extends ControlThrowable: override def getMessage: String = "start tag was here: " - } - case object ConfusedAboutBracesControl extends ControlThrowable { + case object ConfusedAboutBracesControl extends ControlThrowable: override def getMessage: String = " I encountered a '}' where I didn't expect one, maybe this tag isn't closed <" - } - case object TruncatedXMLControl extends 
ControlThrowable { + case object TruncatedXMLControl extends ControlThrowable: override def getMessage: String = "input ended while parsing XML" - } - class MarkupParser(parser: Parser, final val preserveWS: Boolean)(using Context) extends MarkupParserCommon { + class MarkupParser(parser: Parser, final val preserveWS: Boolean)(using Context) extends MarkupParserCommon: import Tokens.{ LBRACE, RBRACE } @@ -83,9 +80,8 @@ object MarkupParsers { /** this method assign the next character to ch and advances in input */ def nextch(): Unit = { input.nextChar() } - protected def ch_returning_nextch: Char = { + protected def ch_returning_nextch: Char = val result = ch; input.nextChar(); result - } def mkProcInstr(position: Span, name: String, text: String): ElementType = parser.symbXMLBuilder.procInstr(position, name, text) @@ -96,23 +92,20 @@ object MarkupParsers { private def debugLastPos = debugLastStartElement.head._1 private def debugLastElem = debugLastStartElement.head._2 - private def errorBraces() = { + private def errorBraces() = reportSyntaxError("in XML content, please use '}}' to express '}'") throw ConfusedAboutBracesControl - } - def errorNoEnd(tag: String): Nothing = { + def errorNoEnd(tag: String): Nothing = reportSyntaxError("expected closing tag of " + tag) throw MissingEndTagControl - } /** checks whether next character starts a Scala block, if yes, skip it. 
* @return true if next character starts a scala block */ - def xCheckEmbeddedBlock: Boolean = { + def xCheckEmbeddedBlock: Boolean = // attentions, side-effect, used in xText xEmbeddedBlock = (ch == '{') && { nextch(); (ch != '{') } xEmbeddedBlock - } /** parse attribute and add it to listmap * [41] Attributes ::= { S Name Eq AttValue } @@ -120,24 +113,23 @@ object MarkupParsers { * | `"` { _ } `"` * | `{` scalablock `}` */ - def xAttributes: mutable.LinkedHashMap[String, Tree] = { + def xAttributes: mutable.LinkedHashMap[String, Tree] = val aMap = mutable.LinkedHashMap[String, Tree]() - while (isNameStart(ch)) { + while (isNameStart(ch)) val start = curOffset val key = xName xEQ() val delim = ch val mid = curOffset - val value: Tree = ch match { + val value: Tree = ch match case '"' | '\'' => val tmp = xAttributeValue(ch_returning_nextch) try handle.parseAttribute(Span(start, curOffset, mid), tmp) - catch { + catch case e: RuntimeException => errorAndResult("error parsing attribute value", parser.errorTermTree(parser.in.offset)) - } case '{' => nextch() @@ -146,7 +138,6 @@ object MarkupParsers { throw TruncatedXMLControl case _ => errorAndResult("' or \" delimited attribute value or '{' scala-expr '}' expected", Literal(Constant(""))) - } // well-formedness constraint: unique attribute names if (aMap contains key) reportSyntaxError("attribute %s may only be defined once" format key) @@ -154,41 +145,36 @@ object MarkupParsers { aMap(key) = value if (ch != '/' && ch != '>') xSpace() - } aMap - } /** '"{char} ) ']]>' * * see [15] */ - def xCharData: Tree = { + def xCharData: Tree = val start = curOffset xToken("[CDATA[") val mid = curOffset xTakeUntil(handle.charData, () => Span(start, curOffset, mid), "]]>") - } - def xUnparsed: Tree = { + def xUnparsed: Tree = val start = curOffset xTakeUntil(handle.unparsed, () => Span(start, curOffset, start), "") - } /** Comment ::= '' * * see [15] */ - def xComment: Tree = { + def xComment: Tree = val start = curOffset - 2 // 
Rewinding to include " Span(start, curOffset, start), "-->") - } - def appendText(span: Span, ts: Buffer[Tree], txt: String): Unit = { + def appendText(span: Span, ts: Buffer[Tree], txt: String): Unit = def append(t: String) = ts append handle.text(span, t) if (preserveWS) append(txt) - else { + else val sb = new StringBuilder() txt foreach { c => @@ -198,15 +184,13 @@ object MarkupParsers { val trimmed = sb.toString.trim if (!trimmed.isEmpty) append(trimmed) - } - } /** adds entity/character to ts as side-effect * @precond ch == '&' */ - def content_AMP(ts: ArrayBuffer[Tree]): Unit = { + def content_AMP(ts: ArrayBuffer[Tree]): Unit = nextch() - val toAppend = ch match { + val toAppend = ch match case '#' => // CharacterRef nextch() val theChar = handle.text(tmppos, xCharRef) @@ -216,10 +200,8 @@ object MarkupParsers { val n = xName xToken(';') handle.entityRef(tmppos, n) - } ts append toAppend - } /** * @precond ch == '{' @@ -235,28 +217,26 @@ object MarkupParsers { * @param ts ... * @return ... */ - private def content_LT(ts: ArrayBuffer[Tree]): Boolean = { + private def content_LT(ts: ArrayBuffer[Tree]): Boolean = if (ch == '/') return true // end tag - val toAppend = ch match { + val toAppend = ch match case '!' => nextch() ; if (ch =='[') xCharData else xComment // CDATA or Comment case '?' 
=> nextch() ; xProcInstr // PI case _ => element // child node - } ts append toAppend false - } - def content: Buffer[Tree] = { + def content: Buffer[Tree] = val ts = new ArrayBuffer[Tree] - while (true) { + while (true) if (xEmbeddedBlock) ts append xEmbeddedExpr - else { + else tmppos = Span(curOffset) - ch match { + ch match // end tag, cdata, comment, pi or child node case '<' => nextch() ; if (content_LT(ts)) return ts // either the character '{' or an embedded scala block } @@ -266,23 +246,18 @@ object MarkupParsers { case SU => return ts // text content - here xEmbeddedBlock might be true case _ => appendText(tmppos, ts, xText) - } - } - } unreachable - } /** '<' element ::= xmlTag1 '>' { xmlExpr | '{' simpleExpr '}' } ETag * | xmlTag1 '/' '>' */ - def element: Tree = { + def element: Tree = val start = curOffset val (qname, attrMap) = xTag(()) - if (ch == '/') { // empty element + if (ch == '/') // empty element xToken("/>") handle.element(Span(start, curOffset, start), qname, attrMap, true, new ListBuffer[Tree]) - } - else { // handle content + else // handle content xToken('>') if (qname == "xml:unparsed") return xUnparsed @@ -292,66 +267,56 @@ object MarkupParsers { xEndTag(qname) debugLastStartElement = debugLastStartElement.tail val span = Span(start, curOffset, start) - qname match { + qname match case "xml:group" => handle.group(span, ts) case _ => handle.element(span, qname, attrMap, false, ts) - } - } - } /** parse character data. 
* precondition: xEmbeddedBlock == false (we are not in a scala block) */ - private def xText: String = { + private def xText: String = assert(!xEmbeddedBlock, "internal error: encountered embedded block") val buf = new StringBuilder def done = buf.toString - while (ch != SU) { - if (ch == '}') { + while (ch != SU) + if (ch == '}') if (charComingAfter(nextch()) == '}') nextch() else errorBraces() - } buf append ch nextch() if (xCheckEmbeddedBlock || ch == '<' || ch == '&') return done - } done - } /** Some try/catch/finally logic used by xLiteral and xLiteralPattern. */ - inline private def xLiteralCommon(f: () => Tree, ifTruncated: String => Unit): Tree = { + inline private def xLiteralCommon(f: () => Tree, ifTruncated: String => Unit): Tree = assert(parser.in.token == Tokens.XMLSTART) val saved = parser.in.saveCopy var output: Tree = null.asInstanceOf[Tree] try output = f() - catch { + catch case c @ TruncatedXMLControl => ifTruncated(c.getMessage) case c @ (MissingEndTagControl | ConfusedAboutBracesControl) => parser.syntaxError(em"${c.getMessage}$debugLastElem>", debugLastPos) case _: ArrayIndexOutOfBoundsException => parser.syntaxError(em"missing end tag in XML literal for <$debugLastElem>", debugLastPos) - } finally parser.in.resume(saved) if (output == null) parser.errorTermTree(parser.in.offset) else output - } /** Use a lookahead parser to run speculative body, and return the first char afterward. */ - private def charComingAfter(body: => Unit): Char = { - try { + private def charComingAfter(body: => Unit): Char = + try input = input.lookaheadReader() body ch - } finally input = parser.in - } /** xLiteral = element { element } * @return Scala representation of this xml literal @@ -367,19 +332,17 @@ object MarkupParsers { content_LT(ts) // parse more XML? 
- if (charComingAfter(xSpaceOpt()) == '<') { - while { + if (charComingAfter(xSpaceOpt()) == '<') + while xSpaceOpt() nextch() ts.append(element) charComingAfter(xSpaceOpt()) == '<' - } do () + do () handle.makeXMLseq(Span(start, curOffset, start), ts) - } - else { + else assert(ts.length == 1, "Require one tree") ts(0) - } }, msg => parser.incompleteInputError(msg.toMessage) ) @@ -390,19 +353,18 @@ object MarkupParsers { def xLiteralPattern: Tree = xLiteralCommon( () => { input = parser.in - saving[Boolean, Tree](handle.isPattern, handle.isPattern = _) { + saving[Boolean, Tree](handle.isPattern, handle.isPattern = _): handle.isPattern = true val tree = xPattern xSpaceOpt() tree - } }, msg => parser.syntaxError(msg.toMessage, curOffset) ) - def escapeToScala[A](op: => A, kind: String): A = { + def escapeToScala[A](op: => A, kind: String): A = xEmbeddedBlock = false - val res = saving(parser.in.currentRegion, parser.in.currentRegion = _) { + val res = saving(parser.in.currentRegion, parser.in.currentRegion = _): val lbrace = Scanners.newTokenData lbrace.token = LBRACE lbrace.offset = parser.in.charOffset - 1 @@ -410,12 +372,10 @@ object MarkupParsers { lbrace.lineOffset = parser.in.lineOffset parser.in.resume(lbrace) op - } if (parser.in.token != RBRACE) reportSyntaxError(" expected end of Scala " + kind) res - } def xEmbeddedExpr: Tree = escapeToScala(parser.block(), "block") @@ -424,15 +384,14 @@ object MarkupParsers { def xScalaPatterns: List[Tree] = escapeToScala(parser.patterns(), "pattern") def reportSyntaxError(offset: Int, str: String): Unit = parser.syntaxError(str.toMessage, offset) - def reportSyntaxError(str: String): Unit = { + def reportSyntaxError(str: String): Unit = reportSyntaxError(curOffset, "in XML literal: " + str) nextch() - } /** '<' xPattern ::= Name [S] { xmlPattern | '{' pattern3 '}' } ETag * | Name [S] '/' '>' */ - def xPattern: Tree = { + def xPattern: Tree = var start = curOffset val qname = xName debugLastStartElement = (start, qname) 
:: debugLastStartElement @@ -444,22 +403,21 @@ object MarkupParsers { if (isEmptyTag) nextch() xToken('>') - if (!isEmptyTag) { + if (!isEmptyTag) // recurses until it hits a termination condition, then returns - def doPattern: Boolean = { + def doPattern: Boolean = val start1 = curOffset if (xEmbeddedBlock) ts ++= xScalaPatterns - else ch match { + else ch match case '<' => // tag nextch() if (ch != '/') ts append xPattern // child else return false // terminate case '{' => // embedded Scala patterns - while (ch == '{') { + while (ch == '{') nextch() ts ++= xScalaPatterns - } assert(!xEmbeddedBlock, "problem with embedded block") case SU => @@ -469,16 +427,11 @@ object MarkupParsers { appendText(Span(start1, curOffset, start1), ts, xText) // here xEmbeddedBlock might be true: // if (xEmbeddedBlock) throw new ApplicationError("after:" + text); // assert - } true - } while (doPattern) { } // call until false xEndTag(qname) debugLastStartElement = debugLastStartElement.tail - } handle.makeXMLpat(Span(start, curOffset, start), qname, ts) - } - } /* class MarkupParser */ -} + /* class MarkupParser */ diff --git a/compiler/src/dotty/tools/dotc/parsing/xml/SymbolicXMLBuilder.scala b/compiler/src/dotty/tools/dotc/parsing/xml/SymbolicXMLBuilder.scala index 0e70cc077fa4..d769563e899e 100644 --- a/compiler/src/dotty/tools/dotc/parsing/xml/SymbolicXMLBuilder.scala +++ b/compiler/src/dotty/tools/dotc/parsing/xml/SymbolicXMLBuilder.scala @@ -25,7 +25,7 @@ import Parsers.Parser * @author Burak Emir * @version 1.0 */ -class SymbolicXMLBuilder(parser: Parser, preserveWS: Boolean)(using Context) { +class SymbolicXMLBuilder(parser: Parser, preserveWS: Boolean)(using Context): import Constants.Constant import untpd._ @@ -34,7 +34,7 @@ class SymbolicXMLBuilder(parser: Parser, preserveWS: Boolean)(using Context) { private[parsing] var isPattern: Boolean = _ - private object xmltypes extends ScalaTypeNames { + private object xmltypes extends ScalaTypeNames: val _Comment: TypeName = 
"Comment" val _Elem: TypeName = "Elem" val _EntityRef: TypeName = "EntityRef" @@ -47,9 +47,8 @@ class SymbolicXMLBuilder(parser: Parser, preserveWS: Boolean)(using Context) { val _Text: TypeName = "Text" val _Unparsed: TypeName = "Unparsed" val _UnprefixedAttribute: TypeName = "UnprefixedAttribute" - } - private object xmlterms extends ScalaTermNames { + private object xmlterms extends ScalaTermNames: val _Null: TermName = "Null" val __Elem: TermName = "Elem" val __Text: TermName = "Text" @@ -58,7 +57,6 @@ class SymbolicXMLBuilder(parser: Parser, preserveWS: Boolean)(using Context) { val _plus: TermName = "&+" val _tmpscope: TermName = "$tmpscope" val _xml: TermName = "xml" - } import xmltypes.{_Comment, _Elem, _EntityRef, _Group, _MetaData, _NamespaceBinding, _NodeBuffer, _PrefixedAttribute, _ProcInstr, _Text, _Unparsed, _UnprefixedAttribute} @@ -99,7 +97,6 @@ class SymbolicXMLBuilder(parser: Parser, preserveWS: Boolean)(using Context) { scope: Tree, empty: Boolean, children: collection.Seq[Tree]): Tree = - { def starArgs = if (children.isEmpty) Nil else List(Typed(makeXMLseq(span, children), wildStar)) @@ -108,16 +105,14 @@ class SymbolicXMLBuilder(parser: Parser, preserveWS: Boolean)(using Context) { def nonpat = New(_scala_xml_Elem, List(List(pre, label, attrs, scope, if (empty) Literal(Constant(true)) else Literal(Constant(false))) ::: starArgs)) atSpan(span) { if (isPattern) pat else nonpat } - } final def entityRef(span: Span, n: String): Tree = atSpan(span)( New(_scala_xml_EntityRef, LL(const(n))) ) // create scala.xml.Text here <: scala.xml.Node - final def text(span: Span, txt: String): Tree = atSpan(span) { + final def text(span: Span, txt: String): Tree = atSpan(span): if (isPattern) makeTextPat(const(txt)) else makeText1(const(txt)) - } def makeTextPat(txt: Tree): Apply = Apply(_scala_xml__Text, List(txt)) def makeText1(txt: Tree): Tree = New(_scala_xml_Text, LL(txt)) @@ -131,48 +126,40 @@ class SymbolicXMLBuilder(parser: Parser, preserveWS: 
Boolean)(using Context) { protected def ProcInstr(target: Tree, txt: Tree): Tree = New(_scala_xml_ProcInstr, LL(target, txt)) /** @todo: attributes */ - def makeXMLpat(span: Span, n: String, args: collection.Seq[Tree]): Tree = { - val (prepat, labpat) = splitPrefix(n) match { + def makeXMLpat(span: Span, n: String, args: collection.Seq[Tree]): Tree = + val (prepat, labpat) = splitPrefix(n) match case (Some(pre), rest) => (const(pre), const(rest)) case _ => (wild, const(n)) - } mkXML(span, true, prepat, labpat, null, null, false, args) - } - protected def convertToTextPat(t: Tree): Tree = t match { + protected def convertToTextPat(t: Tree): Tree = t match case _: Literal => makeTextPat(t) case _ => t - } protected def convertToTextPat(buf: collection.Seq[Tree]): List[Tree] = (buf map convertToTextPat).toList - def parseAttribute(span: Span, s: String): Tree = { + def parseAttribute(span: Span, s: String): Tree = val ts = Utility.parseAttributeValue(s, text(span, _), entityRef(span, _)) - ts match { + ts match case Nil => TypedSplice(tpd.ref(defn.NilModule).withSpan(span)) case t :: Nil => t case _ => makeXMLseq(span, ts) - } - } - def isEmptyText(t: Tree): Boolean = t match { + def isEmptyText(t: Tree): Boolean = t match case Literal(Constant("")) => true case _ => false - } /** could optimize if args.length == 0, args.length == 1 AND args(0) is <: Node. 
*/ - def makeXMLseq(span: Span, args: collection.Seq[Tree]): Block = { + def makeXMLseq(span: Span, args: collection.Seq[Tree]): Block = val buffer = ValDef(_buf, TypeTree(), New(_scala_xml_NodeBuffer, ListOfNil)) val applies = args filterNot isEmptyText map (t => Apply(Select(Ident(_buf), _plus), List(t))) atSpan(span)(new XMLBlock(buffer :: applies.toList, Ident(_buf)) ) - } /** Returns (Some(prefix) | None, rest) based on position of ':' */ - def splitPrefix(name: String): (Option[String], String) = name.splitWhere(_ == ':', doDropIndex = true) match { + def splitPrefix(name: String): (Option[String], String) = name.splitWhere(_ == ':', doDropIndex = true) match case Some((pre, rest)) => (Some(pre), rest) case _ => (None, name) - } /** Various node constructions. */ def group(span: Span, args: collection.Seq[Tree]): Tree = @@ -181,38 +168,33 @@ class SymbolicXMLBuilder(parser: Parser, preserveWS: Boolean)(using Context) { def unparsed(span: Span, str: String): Tree = atSpan(span)( New(_scala_xml_Unparsed, LL(const(str))) ) - def element(span: Span, qname: String, attrMap: mutable.Map[String, Tree], empty: Boolean, args: collection.Seq[Tree]): Tree = { - def handleNamespaceBinding(pre: String, z: String): Tree = { + def element(span: Span, qname: String, attrMap: mutable.Map[String, Tree], empty: Boolean, args: collection.Seq[Tree]): Tree = + def handleNamespaceBinding(pre: String, z: String): Tree = def mkAssign(t: Tree): Tree = Assign( Ident(_tmpscope), New(_scala_xml_NamespaceBinding, LL(const(pre), t, Ident(_tmpscope))) ) - val uri1 = attrMap(z) match { + val uri1 = attrMap(z) match case Apply(_, List(uri @ Literal(Constant(_)))) => mkAssign(uri) case Select(_, nme.Nil) => mkAssign(const(null)) // allow for xmlns="" -- bug #1626 case x => mkAssign(x) - } attrMap -= z uri1 - } /** Extract all the namespaces from the attribute map. 
*/ val namespaces: List[Tree] = - for (z <- attrMap.keys.toList ; if z startsWith "xmlns") yield { - val ns = splitPrefix(z) match { + for (z <- attrMap.keys.toList ; if z startsWith "xmlns") yield + val ns = splitPrefix(z) match case (Some(_), rest) => rest case _ => null - } handleNamespaceBinding(ns, z) - } - val (pre, newlabel) = splitPrefix(qname) match { + val (pre, newlabel) = splitPrefix(qname) match case (Some(p), x) => (p, x) case (None, x) => (null, x) - } - def mkAttributeTree(pre: String, key: String, value: Tree) = atSpan(span.toSynthetic) { + def mkAttributeTree(pre: String, key: String, value: Tree) = atSpan(span.toSynthetic): // XXX this is where we'd like to put Select(value, nme.toString_) for #1787 // after we resolve the Some(foo) situation. val baseArgs = List(const(key), value, Ident(_md)) @@ -221,16 +203,14 @@ class SymbolicXMLBuilder(parser: Parser, preserveWS: Boolean)(using Context) { else (_scala_xml_PrefixedAttribute , const(pre) :: baseArgs) Assign(Ident(_md), New(clazz, LL(attrArgs: _*))) - } def handlePrefixedAttribute(pre: String, key: String, value: Tree) = mkAttributeTree(pre, key, value) def handleUnprefixedAttribute(key: String, value: Tree) = mkAttributeTree(null, key, value) val attributes: List[Tree] = - for ((k, v) <- attrMap.toList.reverse) yield splitPrefix(k) match { + for ((k, v) <- attrMap.toList.reverse) yield splitPrefix(k) match case (Some(pre), rest) => handlePrefixedAttribute(pre, rest, v) case _ => handleUnprefixedAttribute(k, v) - } lazy val scopeDef = ValDef(_scope, _scala_xml_NamespaceBinding, Ident(_tmpscope)) lazy val tmpScopeDef = ValDef(_tmpscope, _scala_xml_NamespaceBinding, Ident(_scope)).withFlags(Mutable) @@ -238,12 +218,11 @@ class SymbolicXMLBuilder(parser: Parser, preserveWS: Boolean)(using Context) { val makeSymbolicAttrs = if (!attributes.isEmpty) Ident(_md) else _scala_xml_Null val (attrResult, nsResult) = - (attributes.isEmpty, namespaces.isEmpty) match { + (attributes.isEmpty, 
namespaces.isEmpty) match case (true , true) => (Nil, Nil) case (true , false) => (scopeDef :: Nil, tmpScopeDef :: namespaces) case (false, true) => (metadataDef :: attributes, Nil) case (false, false) => (scopeDef :: metadataDef :: attributes, tmpScopeDef :: namespaces) - } val body = mkXML( span.toSynthetic, @@ -257,5 +236,3 @@ class SymbolicXMLBuilder(parser: Parser, preserveWS: Boolean)(using Context) { ) atSpan(span.toSynthetic)(new XMLBlock(nsResult, new XMLBlock(attrResult, body))) - } -} diff --git a/compiler/src/dotty/tools/dotc/parsing/xml/Utility.scala b/compiler/src/dotty/tools/dotc/parsing/xml/Utility.scala index 87412cf6d69c..7fb257165215 100644 --- a/compiler/src/dotty/tools/dotc/parsing/xml/Utility.scala +++ b/compiler/src/dotty/tools/dotc/parsing/xml/Utility.scala @@ -12,7 +12,7 @@ import scala.collection.mutable * * @author Burak Emir */ -object Utility { +object Utility: import util.Chars.SU private val unescMap = Map( @@ -32,51 +32,43 @@ object Utility { private final def unescape(ref: String, s: StringBuilder): StringBuilder = ((unescMap get ref) map (s append _)).orNull - def parseAttributeValue[T](value: String, text: String => T, entityRef: String => T): List[T] = { + def parseAttributeValue[T](value: String, text: String => T, entityRef: String => T): List[T] = val sb = new StringBuilder var rfb: StringBuilder = null val nb = new mutable.ListBuffer[T]() val it = value.iterator - while (it.hasNext) { + while (it.hasNext) var c = it.next() // entity! 
flush buffer into text node - if (c == '&') { + if (c == '&') c = it.next() - if (c == '#') { + if (c == '#') c = it.next() val theChar = parseCharRef ({ ()=> c },{ () => c = it.next() },{s => throw new RuntimeException(s)}, {s => throw new RuntimeException(s)}) sb.append(theChar) - } - else { + else if (rfb eq null) rfb = new StringBuilder() rfb append c c = it.next() - while (c != ';') { + while (c != ';') rfb.append(c) c = it.next() - } val ref = rfb.toString() rfb.clear() - unescape(ref,sb) match { + unescape(ref,sb) match case null => - if (!sb.isEmpty) { // flush buffer + if (!sb.isEmpty) // flush buffer nb += text(sb.toString()) sb.clear() - } nb += entityRef(ref) // add entityref case _ => - } - } - } else sb append c - } if (!sb.isEmpty) // flush buffer nb += text(sb.toString()) nb.toList - } /** * {{{ @@ -85,13 +77,13 @@ object Utility { * }}} * See [66] */ - def parseCharRef(ch: () => Char, nextch: () => Unit, reportSyntaxError: String => Unit, reportTruncatedError: String => Unit): String = { + def parseCharRef(ch: () => Char, nextch: () => Unit, reportSyntaxError: String => Unit, reportTruncatedError: String => Unit): String = val hex = ch() == 'x' if (hex) nextch() val base = if (hex) 16 else 10 var i = 0 - while (ch() != ';') { - ch() match { + while (ch() != ';') + ch() match case '0' | '1' | '2' | '3' | '4' | '5' | '6' | '7' | '8' | '9' => i = i * base + ch().asDigit case 'a' | 'b' | 'c' | 'd' | 'e' | 'f' @@ -105,19 +97,15 @@ object Utility { reportTruncatedError("") case _ => reportSyntaxError("character '" + ch() + "' not allowed in char ref\n") - } nextch() - } new String(Array(i), 0, 1) - } /** {{{ * (#x20 | #x9 | #xD | #xA) * }}} */ - final def isSpace(ch: Char): Boolean = ch match { + final def isSpace(ch: Char): Boolean = ch match case '\u0009' | '\u000A' | '\u000D' | '\u0020' => true case _ => false - } /** {{{ * (#x20 | #x9 | #xD | #xA)+ * }}} */ @@ -129,7 +117,7 @@ object Utility { * }}} * See [4] and Appendix B of XML 1.0 specification. 
*/ - def isNameChar(ch: Char): Boolean = { + def isNameChar(ch: Char): Boolean = import java.lang.Character._ // The constants represent groups Mc, Me, Mn, Lm, and Nd. @@ -139,7 +127,6 @@ object Utility { MODIFIER_LETTER | DECIMAL_DIGIT_NUMBER => true case _ => ".-:" contains ch }) - } /** {{{ * NameStart ::= ( Letter | '_' ) @@ -150,16 +137,14 @@ object Utility { * We do not allow a name to start with `:`. * See [3] and Appendix B of XML 1.0 specification */ - def isNameStart(ch: Char): Boolean = { + def isNameStart(ch: Char): Boolean = import java.lang.Character._ - getType(ch).toByte match { + getType(ch).toByte match case LOWERCASE_LETTER | UPPERCASE_LETTER | OTHER_LETTER | TITLECASE_LETTER | LETTER_NUMBER => true case _ => ch == '_' - } - } /** {{{ * Name ::= ( Letter | '_' ) (NameChar)* @@ -169,5 +154,4 @@ object Utility { def isName(s: String): Boolean = s.nonEmpty && isNameStart(s.head) && (s.tail forall isNameChar) -} diff --git a/compiler/src/dotty/tools/dotc/plugins/Plugin.scala b/compiler/src/dotty/tools/dotc/plugins/Plugin.scala index 1baf3a06ad9e..b6933a88eb50 100644 --- a/compiler/src/dotty/tools/dotc/plugins/Plugin.scala +++ b/compiler/src/dotty/tools/dotc/plugins/Plugin.scala @@ -14,11 +14,10 @@ import java.util.Properties import scala.util.{ Try, Success, Failure } -trait PluginPhase extends MiniPhase { +trait PluginPhase extends MiniPhase: def runsBefore: Set[String] = Set.empty -} -sealed trait Plugin { +sealed trait Plugin: /** The name of this plugin */ def name: String @@ -38,23 +37,21 @@ sealed trait Plugin { * should be listed with the `-P:plugname:` part included. */ val optionsHelp: Option[String] = None -} /** A standard plugin can be inserted into the normal compilation pipeline */ -trait StandardPlugin extends Plugin { +trait StandardPlugin extends Plugin: /** Non-research plugins should override this method to return the phases * * @param options commandline options to the plugin. 
* @return a list of phases to be added to the phase plan */ def init(options: List[String]): List[PluginPhase] -} /** A research plugin may customize the compilation pipeline freely * * @note Research plugins are only supported by nightly or snapshot build of the compiler. */ -trait ResearchPlugin extends Plugin { +trait ResearchPlugin extends Plugin: /** Research plugins should override this method to return the new phase plan * * @param options commandline options to the plugin, `-P:plugname:opt1,opt2` becomes List(opt1, opt2) @@ -62,37 +59,33 @@ trait ResearchPlugin extends Plugin { * @return the new phase plan */ def init(options: List[String], plan: List[List[Phase]])(using Context): List[List[Phase]] -} -object Plugin { +object Plugin: private val PluginFile = "plugin.properties" /** Create a class loader with the specified locations plus * the loader that loaded the Scala compiler. */ - private def loaderFor(locations: Seq[Path]): ClassLoader = { + private def loaderFor(locations: Seq[Path]): ClassLoader = val compilerLoader = classOf[Plugin].getClassLoader val urls = locations map (_.toURL) new java.net.URLClassLoader(urls.toArray, compilerLoader) - } type AnyClass = Class[?] /** Use a class loader to load the plugin class. */ - def load(classname: String, loader: ClassLoader): Try[AnyClass] = { + def load(classname: String, loader: ClassLoader): Try[AnyClass] = import scala.util.control.NonFatal try Success[AnyClass](loader loadClass classname) - catch { + catch case NonFatal(e) => Failure(new PluginLoadException(classname, s"Error: unable to load class $classname: ${e.getMessage}")) case e: NoClassDefFoundError => Failure(new PluginLoadException(classname, s"Error: class not found: ${e.getMessage} required by $classname")) - } - } /** Load all plugins specified by the arguments. * Each location of `paths` must be a valid plugin archive or exploded archive. 
@@ -104,9 +97,9 @@ object Plugin { def loadAllFrom( paths: List[List[Path]], dirs: List[Path], - ignoring: List[String]): List[Try[Plugin]] = { + ignoring: List[String]): List[Try[Plugin]] = - def fromFile(inputStream: InputStream, path: Path): String = { + def fromFile(inputStream: InputStream, path: Path): String = val props = new Properties props.load(inputStream) inputStream.close() @@ -115,14 +108,12 @@ object Plugin { if (pluginClass == null) throw new RuntimeException("Bad plugin descriptor: " + path) else pluginClass - } - def loadDescriptionFromDir(f: Path): Try[String] = { + def loadDescriptionFromDir(f: Path): Try[String] = val path = f / PluginFile Try(fromFile(new java.io.FileInputStream(path.jpath.toFile), path)) - } - def loadDescriptionFromJar(jarp: Path): Try[String] = { + def loadDescriptionFromJar(jarp: Path): Try[String] = // XXX Return to this once we have more ARM support def read(is: InputStream) = if (is == null) throw new PluginLoadException(jarp.path, s"Missing $PluginFile in $jarp") @@ -130,7 +121,6 @@ object Plugin { val fileEntry = new java.util.jar.JarEntry(PluginFile) Try(read(new Jar(jarp.jpath.toFile).getEntryStream(fileEntry))) - } // List[(jar, Try(descriptor))] in dir def scan(d: Directory) = @@ -140,23 +130,20 @@ object Plugin { // scan plugin dirs for jars containing plugins, ignoring dirs with none and other jars val fromDirs: PDResults = dirs filter (_.isDirectory) flatMap { d => - scan(d.toDirectory) collect { + scan(d.toDirectory) collect: case (j, Success(pd)) => Success((pd, loaderFor(Seq(j)))) - } } // scan jar paths for plugins, taking the first plugin you find. // a path element can be either a plugin.jar or an exploded dir. 
- def findDescriptor(ps: List[Path]) = { - def loop(qs: List[Path]): Try[String] = qs match { + def findDescriptor(ps: List[Path]) = + def loop(qs: List[Path]): Try[String] = qs match case Nil => Failure(new MissingPluginException(ps)) case p :: rest => if (p.isDirectory) loadDescriptionFromDir(p.toDirectory) orElse loop(rest) else if (p.isFile) loadDescriptionFromJar(p.toFile) orElse loop(rest) else loop(rest) - } loop(ps) - } val fromPaths: PDResults = paths map (p => findDescriptor(p) match { case Success(classname) => Success((classname, loaderFor(p))) @@ -172,25 +159,20 @@ object Plugin { Failure(new PluginLoadException(plugin.name, s"Ignoring duplicate plugin ${plugin.name} (${classname})")) else if (ignoring contains plugin.name) Failure(new PluginLoadException(plugin.name, s"Disabling plugin ${plugin.name}")) - else { + else seen += classname Success(plugin) - } } }) enabled // distinct and not disabled - } /** Instantiate a plugin class, given the class and * the compiler it is to be used in. 
*/ def instantiate(clazz: AnyClass): Plugin = clazz.getConstructor().newInstance().asInstanceOf[Plugin] -} -class PluginLoadException(val path: String, message: String, cause: Exception) extends Exception(message, cause) { +class PluginLoadException(val path: String, message: String, cause: Exception) extends Exception(message, cause): def this(path: String, message: String) = this(path, message, null) -} -class MissingPluginException(path: String) extends PluginLoadException(path, s"No plugin in path $path") { +class MissingPluginException(path: String) extends PluginLoadException(path, s"No plugin in path $path"): def this(paths: List[Path]) = this(paths mkString File.pathSeparator) -} diff --git a/compiler/src/dotty/tools/dotc/plugins/Plugins.scala b/compiler/src/dotty/tools/dotc/plugins/Plugins.scala index c44fe4cf59b4..1025a5a81a4e 100644 --- a/compiler/src/dotty/tools/dotc/plugins/Plugins.scala +++ b/compiler/src/dotty/tools/dotc/plugins/Plugins.scala @@ -17,7 +17,7 @@ import config.Printers.plugins.{ println => debug } * @version 1.1, 2009/1/2 * Updated 2009/1/2 by Anders Bach Nielsen: Added features to implement SIP 00002 */ -trait Plugins { +trait Plugins: self: ContextBase => /** Load a rough list of the plugins. For speed, it @@ -25,13 +25,12 @@ trait Plugins { * test for same-named phases or other problems that are * filtered from the final list of plugins. 
*/ - protected def loadRoughPluginsList(using Context): List[Plugin] = { + protected def loadRoughPluginsList(using Context): List[Plugin] = def asPath(p: String) = ClassPath split p val paths = ctx.settings.plugin.value filter (_ != "") map (s => asPath(s) map Path.apply) - val dirs = { + val dirs = def injectDefault(s: String) = if (s.isEmpty) PathResolver.Defaults.scalaPluginPath else s asPath(ctx.settings.pluginsDir.value) map injectDefault map Path.apply - } val maybes = Plugin.loadAllFrom(paths, dirs, ctx.settings.disable.value) val (goods, errors) = maybes partition (_.isSuccess) // Explicit parameterization of recover to avoid -Xlint warning about inferred Any @@ -42,25 +41,23 @@ trait Plugins { }) goods map (_.get) - } private var _roughPluginsList: List[Plugin] = _ protected def roughPluginsList(using Context): List[Plugin] = - if (_roughPluginsList == null) { + if (_roughPluginsList == null) _roughPluginsList = loadRoughPluginsList _roughPluginsList - } else _roughPluginsList /** Load all available plugins. Skips plugins that * either have the same name as another one, or which * define a phase name that another one does. 
*/ - protected def loadPlugins(using Context): List[Plugin] = { + protected def loadPlugins(using Context): List[Plugin] = // remove any with conflicting names or subcomponent names def pick( plugins: List[Plugin], - plugNames: Set[String]): List[Plugin] = { + plugNames: Set[String]): List[Plugin] = if (plugins.isEmpty) return Nil // early return val plug :: tail = plugins: @unchecked @@ -74,11 +71,9 @@ trait Plugins { fail("[skipping a repeated plugin: %s]") else if (ctx.settings.disable.value contains plug.name) fail("[disabling plugin: %s]") - else { + else note("[loaded plugin %s]") withPlug - } - } val plugs = pick(roughPluginsList, ctx.base.phasePlan.flatten.map(_.phaseName).toSet) @@ -94,14 +89,12 @@ trait Plugins { report.error(em"bad option: -P:$opt") plugs - } private var _plugins: List[Plugin] = _ def plugins(using Context): List[Plugin] = - if (_plugins == null) { + if (_plugins == null) _plugins = loadPlugins _plugins - } else _plugins /** A description of all the plugins that are loaded */ @@ -115,11 +108,10 @@ trait Plugins { }).mkString /** Add plugin phases to phase plan */ - def addPluginPhases(plan: List[List[Phase]])(using Context): List[List[Phase]] = { - def options(plugin: Plugin): List[String] = { + def addPluginPhases(plan: List[List[Phase]])(using Context): List[List[Phase]] = + def options(plugin: Plugin): List[String] = def namec = plugin.name + ":" ctx.settings.pluginOptions.value filter (_ startsWith namec) map (_ stripPrefix namec) - } // schedule plugins according to ordering constraints val pluginPhases = plugins.collect { case p: StandardPlugin => p }.flatMap { plug => plug.init(options(plug)) } @@ -127,15 +119,12 @@ trait Plugins { // add research plugins if (Feature.isExperimentalEnabled) - plugins.collect { case p: ResearchPlugin => p }.foldRight(updatedPlan) { + plugins.collect { case p: ResearchPlugin => p }.foldRight(updatedPlan): (plug, plan) => plug.init(options(plug), plan) - } else updatedPlan - } -} -object Plugins { 
+object Plugins: /** Insert plugin phases in the right place of the phase plan * * The scheduling makes sure the ordering constraints of plugin phases are satisfied. @@ -143,7 +132,7 @@ object Plugins { * * Note: this algorithm is factored out for unit test. */ - def schedule(plan: List[List[Phase]], pluginPhases: List[PluginPhase]): List[List[Phase]] = { + def schedule(plan: List[List[Phase]], pluginPhases: List[PluginPhase]): List[List[Phase]] = import scala.collection.mutable.{ Map => MMap } type OrderingReq = (Set[String], Set[String]) @@ -162,14 +151,13 @@ object Plugins { var updatedPlan = plan - def constraintConflict(phase: Phase): String = { + def constraintConflict(phase: Phase): String = val (runsAfter, runsBefore) = orderRequirements(phase.phaseName) s""" |Ordering conflict for phase ${phase.phaseName} |after: ${runsAfter.mkString("[", ", ", "]")} |before: ${runsBefore.mkString("[", ", ", "]")} """.stripMargin - } // init ordering map, no propagation pluginPhases.foreach { phase => @@ -204,26 +192,24 @@ object Plugins { ) // propagate constraints from related phases to current phase: transitivity - def propagate(phase: Phase): OrderingReq = { + def propagate(phase: Phase): OrderingReq = def propagateRunsBefore(beforePhase: String): Set[String] = if (beforePhase == phase.phaseName) throw new Exception(constraintConflict(phase)) else if (isInserted(beforePhase)) Set(beforePhase) - else { + else val (_, runsBefore) = orderRequirements(beforePhase) runsBefore.flatMap(propagateRunsBefore) + beforePhase - } def propagateRunsAfter(afterPhase: String): Set[String] = if (afterPhase == phase.phaseName) throw new Exception(constraintConflict(phase)) else if (isInserted(afterPhase)) Set(afterPhase) - else { + else val (runsAfter, _) = orderRequirements(afterPhase) runsAfter.flatMap(propagateRunsAfter) + afterPhase - } var (runsAfter, runsBefore) = orderRequirements(phase.phaseName) @@ -231,7 +217,6 @@ object Plugins { runsBefore = 
runsBefore.flatMap(propagateRunsBefore) (runsAfter, runsBefore) - } pluginPhases.sortBy(_.phaseName).foreach { phase => var (runsAfter1, runsBefore1) = propagate(phase) @@ -271,5 +256,3 @@ object Plugins { } updatedPlan - } -} diff --git a/compiler/src/dotty/tools/dotc/printing/Formatting.scala b/compiler/src/dotty/tools/dotc/printing/Formatting.scala index 3f32b29654c9..c6913dbce4d3 100644 --- a/compiler/src/dotty/tools/dotc/printing/Formatting.scala +++ b/compiler/src/dotty/tools/dotc/printing/Formatting.scala @@ -13,7 +13,7 @@ import reporting.Message import util.DiffUtil import Highlighting._ -object Formatting { +object Formatting: object ShownDef: /** Represents a value that has been "shown" and can be consumed by StringFormatter. @@ -119,10 +119,10 @@ object Formatting { * of the string context *before* inserting the arguments. That way, we guard * against accidentally treating an interpolated value as a margin. */ - class StringFormatter(protected val sc: StringContext) { + class StringFormatter(protected val sc: StringContext): protected def showArg(arg: Any)(using Context): String = arg.tryToShow - private def treatArg(arg: Shown, suffix: String)(using Context): (String, String) = arg.ctxShow match { + private def treatArg(arg: Shown, suffix: String)(using Context): (String, String) = arg.ctxShow match case arg: Seq[?] 
if suffix.indexOf('%') == 0 && suffix.indexOf('%', 1) != -1 => val end = suffix.indexOf('%', 1) val sep = StringContext.processEscapes(suffix.substring(1, end)) @@ -131,22 +131,17 @@ object Formatting { (arg.map(showArg).mkString("[", ", ", "]"), suffix) case arg => (showArg(arg), suffix) - } - def assemble(args: Seq[Shown])(using Context): String = { + def assemble(args: Seq[Shown])(using Context): String = def isLineBreak(c: Char) = c == '\n' || c == '\f' // compatible with StringLike#isLineBreak - def stripTrailingPart(s: String) = { + def stripTrailingPart(s: String) = val (pre, post) = s.span(c => !isLineBreak(c)) pre ++ post.stripMargin - } - val (prefix, suffixes) = sc.parts.toList match { + val (prefix, suffixes) = sc.parts.toList match case head :: tail => (head.stripMargin, tail map stripTrailingPart) case Nil => ("", Nil) - } val (args1, suffixes1) = args.lazyZip(suffixes).map(treatArg(_, _)).unzip new StringContext(prefix :: suffixes1.toList: _*).s(args1: _*) - } - } /** This method will produce a colored type diff from the given arguments. 
* The idea is to do this for known cases that are useful and then fall back @@ -175,4 +170,3 @@ object Formatting { def hlAsKeyword(str: String)(using Context): String = if str.isEmpty || ctx.settings.color.value == "never" then str else s"${SyntaxHighlighting.KeywordColor}$str${SyntaxHighlighting.NoColor}" -} diff --git a/compiler/src/dotty/tools/dotc/printing/Highlighting.scala b/compiler/src/dotty/tools/dotc/printing/Highlighting.scala index ceb5afdea750..fa88cae57daa 100644 --- a/compiler/src/dotty/tools/dotc/printing/Highlighting.scala +++ b/compiler/src/dotty/tools/dotc/printing/Highlighting.scala @@ -5,9 +5,9 @@ package printing import scala.collection.mutable import core.Contexts._ -object Highlighting { +object Highlighting: - abstract class Highlight(private val highlight: String) { + abstract class Highlight(private val highlight: String): def text: String def show(using Context): String = if ctx.useColors then highlight + text + Console.RESET else text @@ -20,32 +20,27 @@ object Highlighting { def +(other: String)(using Context): HighlightBuffer = new HighlightBuffer(this) + other - } - abstract class Modifier(private val mod: String, text: String) extends Highlight(Console.RESET) { + abstract class Modifier(private val mod: String, text: String) extends Highlight(Console.RESET): override def show(using Context): String = if (ctx.settings.color.value == "never") "" else mod + super.show - } - case class HighlightBuffer(hl: Highlight)(using Context) { + case class HighlightBuffer(hl: Highlight)(using Context): private val buffer = new mutable.ListBuffer[String] buffer += hl.show - def +(other: Highlight): HighlightBuffer = { + def +(other: Highlight): HighlightBuffer = buffer += other.show this - } - def +(other: String): HighlightBuffer = { + def +(other: String): HighlightBuffer = buffer += other this - } override def toString: String = buffer.mkString - } case class NoColor(text: String) extends Highlight(Console.RESET) @@ -69,4 +64,3 @@ object 
Highlighting { case class Bold(text: String) extends Modifier(Console.BOLD, text) case class Underlined(text: String) extends Modifier(Console.UNDERLINED, text) -} diff --git a/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala b/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala index f3540502597c..e51cadd8cf02 100644 --- a/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala +++ b/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala @@ -17,7 +17,7 @@ import scala.annotation.switch import config.{Config, Feature} import cc.{CapturingType, EventuallyCapturingType, CaptureSet, isBoxed} -class PlainPrinter(_ctx: Context) extends Printer { +class PlainPrinter(_ctx: Context) extends Printer: /** The context of all public methods in Printer and subclasses. * Overridden in RefinedPrinter. @@ -50,7 +50,7 @@ class PlainPrinter(_ctx: Context) extends Printer { def homogenize(tp: Type): Type = if (homogenizedView) - tp match { + tp match case tp: ThisType if tp.cls.is(Package) && !tp.cls.isEffectiveRoot => requiredPackage(tp.cls.fullName).termRef case tp: TypeVar if tp.isInstantiated => @@ -73,16 +73,14 @@ class PlainPrinter(_ctx: Context) extends Printer { tp.reduceProjection case _ => tp - } else tp private def sameBound(lo: Type, hi: Type): Boolean = try lo frozen_=:= hi catch { case NonFatal(ex) => false } - private def homogenizeArg(tp: Type) = tp match { + private def homogenizeArg(tp: Type) = tp match case TypeBounds(lo, hi) if homogenizedView && sameBound(lo, hi) => homogenize(hi) case _ => tp - } private def selfRecName(n: Int) = s"z$n" @@ -112,12 +110,11 @@ class PlainPrinter(_ctx: Context) extends Printer { /** String representation of a refinement */ protected def toTextRefinement(rt: RefinedType): Text = - val keyword = rt.refinedInfo match { + val keyword = rt.refinedInfo match case _: ExprType | _: MethodOrPoly => "def " case _: TypeBounds => "type " case _: TypeProxy => "val " case _ => "" - } (keyword ~ refinementNameString(rt) ~ 
toTextRHS(rt.refinedInfo)).close protected def argText(arg: Type, isErased: Boolean = false): Text = keywordText("erased ").provided(isErased) ~ (homogenizeArg(arg) match { @@ -166,7 +163,7 @@ class PlainPrinter(_ctx: Context) extends Printer { final protected def rootSetText = Str("{cap}") def toText(tp: Type): Text = controlled { - homogenize(tp) match { + homogenize(tp) match case tp: TypeType => toTextRHS(tp) case tp: TermRef @@ -198,10 +195,9 @@ class PlainPrinter(_ctx: Context) extends Printer { refinementChain(tp).reverse: @unchecked toTextLocal(parent) ~ "{" ~ Text(refined map toTextRefinement, "; ").close ~ "}" case tp: RecType => - try { + try openRecs = tp :: openRecs "{" ~ selfRecName(openRecs.length) ~ " => " ~ toTextGlobal(tp.parent) ~ "}" - } finally openRecs = openRecs.tail case AndType(tp1, tp2) => changePrec(AndTypePrec) { toText(tp1) ~ " & " ~ atPrec(AndTypePrec + 1) { toText(tp2) } } @@ -209,11 +205,10 @@ class PlainPrinter(_ctx: Context) extends Printer { changePrec(OrTypePrec) { toText(tp1) ~ " | " ~ atPrec(OrTypePrec + 1) { toText(tp2) } } case MatchType(bound, scrutinee, cases) => changePrec(GlobalPrec) { - def caseText(tp: Type): Text = tp match { + def caseText(tp: Type): Text = tp match case tp: HKTypeLambda => caseText(tp.resultType) case defn.MatchCase(pat, body) => "case " ~ toText(pat) ~ " => " ~ toText(body) case _ => "case " ~ toText(tp) - } def casesText = Text(cases.map(caseText), "\n") atPrec(InfixPrec) { toText(scrutinee) } ~ keywordStr(" match ") ~ "{" ~ casesText ~ "}" ~ @@ -234,7 +229,7 @@ class PlainPrinter(_ctx: Context) extends Printer { case NoPrefix => "" case tp: MethodType => - changePrec(GlobalPrec) { + changePrec(GlobalPrec): "(" ~ keywordText("using ").provided(tp.isContextualMethod) ~ keywordText("implicit ").provided(tp.isImplicitMethod && !tp.isContextualMethod) @@ -242,7 +237,6 @@ class PlainPrinter(_ctx: Context) extends Printer { ~ ")" ~ (Str(": ") provided !tp.resultType.isInstanceOf[MethodOrPoly]) ~ 
toText(tp.resultType) - } case ExprType(restp) => def arrowText: Text = restp match case ct @ EventuallyCapturingType(parent, refs) if ct.annot.symbol == defn.RetainsByNameAnnot => @@ -251,15 +245,13 @@ class PlainPrinter(_ctx: Context) extends Printer { if Feature.pureFunsEnabled then "->" else "=>" changePrec(GlobalPrec)(arrowText ~ " " ~ toText(restp)) case tp: HKTypeLambda => - changePrec(GlobalPrec) { + changePrec(GlobalPrec): "[" ~ paramsText(tp) ~ "]" ~ lambdaHash(tp) ~ Str(" =>> ") ~ toTextGlobal(tp.resultType) - } case tp: PolyType => - changePrec(GlobalPrec) { + changePrec(GlobalPrec): "[" ~ paramsText(tp) ~ "]" ~ lambdaHash(tp) ~ (Str(": ") provided !tp.resultType.isInstanceOf[MethodOrPoly]) ~ toTextGlobal(tp.resultType) - } case AnnotatedType(tpe, annot) => if annot.symbol == defn.InlineParamAnnot || annot.symbol == defn.ErasedParamAnnot then toText(tpe) else toTextLocal(tpe) ~ " " ~ toText(annot) @@ -267,7 +259,7 @@ class PlainPrinter(_ctx: Context) extends Printer { def toTextCaret(tp: Type) = if printDebug then toTextLocal(tp) ~ Str("^") else toText(tp) if (tp.isInstantiated) toTextCaret(tp.instanceOpt) - else { + else val constr = ctx.typerState.constraint val bounds = if constr.contains(tp) then @@ -277,30 +269,26 @@ class PlainPrinter(_ctx: Context) extends Printer { if (bounds.isTypeAlias) toTextCaret(bounds.lo) else if (ctx.settings.YshowVarBounds.value) "(" ~ toText(tp.origin) ~ "?" ~ toText(bounds) ~ ")" else toText(tp.origin) - } case tp: LazyRef => def refTxt = try toTextGlobal(tp.ref) - catch { + catch case ex: Throwable => Str("...") - } "LazyRef(" ~ refTxt ~ ")" case Range(lo, hi) => toText(lo) ~ ".." 
~ toText(hi) case _ => tp.fallbackToText(this) - } }.close def toTextSingleton(tp: SingletonType): Text = "(" ~ toTextRef(tp) ~ " : " ~ toTextGlobal(tp.underlying) ~ ")" - protected def paramsText(lam: LambdaType): Text = { + protected def paramsText(lam: LambdaType): Text = val erasedParams = lam.erasedParams def paramText(name: Name, tp: Type, erased: Boolean) = keywordText("erased ").provided(erased) ~ toText(name) ~ lambdaHash(lam) ~ toTextRHS(tp, isParameter = true) Text(lam.paramNames.lazyZip(lam.paramInfos).lazyZip(erasedParams).map(paramText), ", ") - } protected def ParamRefNameString(name: Name): String = nameString(name) @@ -344,14 +332,13 @@ class PlainPrinter(_ctx: Context) extends Printer { protected def trimPrefix(text: Text): Text = text.stripPrefix(objectPrefix).stripPrefix(packagePrefix) - protected def selectionString(tp: NamedType): String = { + protected def selectionString(tp: NamedType): String = val sym = if (homogenizedView) tp.symbol else tp.currentSymbol if (sym.exists) nameString(sym) else nameString(tp.name) - } /** The string representation of this type used as a prefix */ - def toTextRef(tp: SingletonType): Text = controlled { - tp match { + def toTextRef(tp: SingletonType): Text = controlled: + tp match case tp: TermRef => toTextPrefix(tp.prefix) ~ selectionString(tp) case tp: ThisType => @@ -372,17 +359,13 @@ class PlainPrinter(_ctx: Context) extends Printer { if (homogenizedView) toText(tp.info) else if (ctx.settings.XprintTypes.value) "<" ~ toText(tp.repr) ~ ":" ~ toText(tp.info) ~ ">" else toText(tp.repr) - } - } /** The string representation of this type used as a prefix, including separator */ - def toTextPrefix(tp: Type): Text = controlled { - homogenize(tp) match { + def toTextPrefix(tp: Type): Text = controlled: + homogenize(tp) match case NoPrefix => "" case tp: SingletonType => toTextRef(tp) ~ "." 
case tp => trimPrefix(toTextLocal(tp)) ~ "#" - } - } def toTextCaptureRef(tp: Type): Text = homogenize(tp) match @@ -399,10 +382,9 @@ class PlainPrinter(_ctx: Context) extends Printer { /** String representation of a definition's type following its name, * if symbol is completed, "?" otherwise. */ - protected def toTextRHS(optType: Option[Type]): Text = optType match { + protected def toTextRHS(optType: Option[Type]): Text = optType match case Some(tp) => toTextRHS(tp) case None => "?" - } protected def decomposeLambdas(bounds: TypeBounds): (Text, TypeBounds) = def decompose(tp: Type): (Text, Type) = tp.stripTypeVar match @@ -428,8 +410,8 @@ class PlainPrinter(_ctx: Context) extends Printer { end decomposeLambdas /** String representation of a definition's type following its name */ - protected def toTextRHS(tp: Type, isParameter: Boolean = false): Text = controlled { - homogenize(tp) match { + protected def toTextRHS(tp: Type, isParameter: Boolean = false): Text = controlled: + homogenize(tp) match case tp: TypeBounds => val (tparamStr, rhs) = decomposeLambdas(tp) val binder = rhs match @@ -444,11 +426,10 @@ class PlainPrinter(_ctx: Context) extends Printer { val (tparams, otherDecls) = decls.toList partition treatAsTypeParam val tparamsText = if (tparams.isEmpty) Text() else ("[" ~ dclsText(tparams) ~ "]").close - val selfText: Text = selfInfo match { + val selfText: Text = selfInfo match case NoType => Text() case sym: Symbol if !sym.isCompleted => "this: ? 
=>" case _ => "this: " ~ atPrec(InfixPrec) { toText(tp.selfType) } ~ " =>" - } val trueDecls = otherDecls.filterNot(treatAsTypeArg) val declsText = if (trueDecls.isEmpty || !ctx.settings.Ydebug.value) Text() @@ -466,8 +447,6 @@ class PlainPrinter(_ctx: Context) extends Printer { ~ toTextGlobal(tp.resultType) case tp => ": " ~ toTextGlobal(tp) - } - } protected def toTextParents(parents: List[Type]): Text = Text(parents.map(toTextLocal), " with ") @@ -475,7 +454,7 @@ class PlainPrinter(_ctx: Context) extends Printer { protected def treatAsTypeArg(sym: Symbol): Boolean = false /** String representation of symbol's kind. */ - def kindString(sym: Symbol): String = { + def kindString(sym: Symbol): String = val flags = sym.flagsUNSAFE if (flags.is(PackageClass)) "package class" else if (flags.is(PackageVal)) "package" @@ -499,10 +478,9 @@ class PlainPrinter(_ctx: Context) extends Printer { else if (sym.is(Method)) "method" else if (sym.isTerm) "value" else "" - } /** String representation of symbol's definition keyword */ - protected def keyString(sym: Symbol): String = { + protected def keyString(sym: Symbol): String = val flags = sym.flagsUNSAFE if (flags.isAllOf(JavaInterface)) "interface" else if (flags.is(Trait)) "trait" @@ -514,7 +492,6 @@ class PlainPrinter(_ctx: Context) extends Printer { else if (sym.is(Method)) "def" else if (sym.isTerm && !flags.is(Param)) "val" else "" - } protected def privateWithinString(sym: Symbol): String = if (sym.exists && sym.privateWithin.exists) @@ -547,18 +524,17 @@ class PlainPrinter(_ctx: Context) extends Printer { def locationText(sym: Symbol): Text = if (!sym.exists) "" - else { + else val ownr = sym.effectiveOwner if (ownr.isClass && !isEmptyPrefix(ownr)) " in " ~ toText(ownr) else Text() - } def locatedText(sym: Symbol): Text = (toText(sym) ~ locationText(sym)).close def extendedLocationText(sym: Symbol): Text = if (!sym.exists) "" - else { - def recur(ownr: Symbol, innerLocation: String): Text = { + else + def recur(ownr: 
Symbol, innerLocation: String): Text = def nextOuter(innerKind: String): Text = recur(ownr.effectiveOwner, if (!innerLocation.isEmpty) innerLocation @@ -571,13 +547,11 @@ class PlainPrinter(_ctx: Context) extends Printer { else if (ownr.isLocalDummy) showLocation(ownr.owner, "locally defined in") else if (ownr.isTerm && !ownr.isOneOf(Module | Method)) showLocation(ownr, "in the initializer of") else showLocation(ownr, "in") - } recur(sym.owner, "") - } def toText(denot: Denotation): Text = toText(denot.symbol) ~ "/D" - private def escapedChar(ch: Char): String = (ch: @switch) match { + private def escapedChar(ch: Char): String = (ch: @switch) match case '\b' => "\\b" case '\t' => "\\t" case '\n' => "\\n" @@ -587,9 +561,8 @@ class PlainPrinter(_ctx: Context) extends Printer { case '\'' => "\\\'" case '\\' => "\\\\" case _ => if ch.isControl then f"${"\\"}u${ch.toInt}%04x" else String.valueOf(ch).nn - } - def toText(const: Constant): Text = const.tag match { + def toText(const: Constant): Text = const.tag match case StringTag => stringText("\"" + escapedString(const.value.toString) + "\"") case ClazzTag => "classOf[" ~ toText(const.typeValue) ~ "]" case CharTag => literalText(s"'${escapedChar(const.charValue)}'") @@ -597,7 +570,6 @@ class PlainPrinter(_ctx: Context) extends Printer { case DoubleTag => literalText(const.doubleValue.toString + "d") case FloatTag => literalText(const.floatValue.toString + "f") case _ => literalText(String.valueOf(const.value).nn) - } /** Usual target for `Annotation#toText`, overridden in RefinedPrinter */ def annotText(annot: Annotation): Text = s"@${annot.symbol.name}" @@ -618,11 +590,10 @@ class PlainPrinter(_ctx: Context) extends Printer { ("Scope{" ~ dclsText(sc.toList) ~ "}").close def toText[T <: Untyped](tree: Tree[T]): Text = { - def toTextElem(elem: Any): Text = elem match { + def toTextElem(elem: Any): Text = elem match case elem: Showable => elem.toText(this) case elem: List[?] 
=> "List(" ~ Text(elem map toTextElem, ",") ~ ")" case elem => elem.toString - } val nodeName = tree.productPrefix val elems = Text(tree.productIterator.map(toTextElem).toList, ", ") @@ -640,19 +611,17 @@ class PlainPrinter(_ctx: Context) extends Printer { else if (pos.source.exists) s"${pos.source.file.name}:${pos.line + 1}" else s"(no source file, offset = ${pos.span.point})" - def toText(result: SearchResult): Text = result match { + def toText(result: SearchResult): Text = result match case result: SearchSuccess => "SearchSuccess: " ~ toText(result.ref) ~ " via " ~ toText(result.tree) case result: SearchFailure => - result.reason match { + result.reason match case _: NoMatchingImplicits => "No Matching Implicit" case _: DivergingImplicit => "Diverging Implicit" case result: AmbiguousImplicits => "Ambiguous Implicit: " ~ toText(result.alt1.ref) ~ " and " ~ toText(result.alt2.ref) case _ => "Search Failure: " ~ toText(result.tree) - } - } def toText(importInfo: ImportInfo): Text = val siteStr = importInfo.site.show @@ -668,26 +637,24 @@ class PlainPrinter(_ctx: Context) extends Printer { try // The current TyperState constraint determines how type variables are printed ctx.typerState.constraint = c - def entryText(tp: Type) = tp match { + def entryText(tp: Type) = tp match case tp: TypeBounds => toText(tp) case _ => " := " ~ toText(tp) - } val indent = 3 val uninstVarsText = " uninstantiated variables: " ~ Text(c.uninstVars.map(toText), ", ") val constrainedText = " constrained types: " ~ Text(c.domainLambdas.map(toText), ", ") val boundsText = - " bounds: " ~ { + " bounds: " `~`: val assocs = for (param <- c.domainParams) yield (" " * indent) ~ toText(param) ~ entryText(c.entry(param)) Text(assocs, "\n") - } val orderingText = - " ordering: " ~ { + " ordering: " `~`: val deps = for { param <- c.domainParams @@ -698,7 +665,6 @@ class PlainPrinter(_ctx: Context) extends Printer { (" " * indent) ~ toText(param) ~ " <: " ~ Text(ups.map(toText), ", ") Text(deps, 
"\n") - } val depsText = if Config.showConstraintDeps then c.depsToString else "" //Printer.debugPrintUnique = false Text.lines(List(uninstVarsText, constrainedText, boundsText, orderingText, depsText)) @@ -724,5 +690,4 @@ class PlainPrinter(_ctx: Context) extends Printer { if (ctx.useColors) color + text + SyntaxHighlighting.NoColor else text protected def coloredText(text: Text, color: String): Text = if (ctx.useColors) color ~ text ~ SyntaxHighlighting.NoColor else text -} diff --git a/compiler/src/dotty/tools/dotc/printing/Printer.scala b/compiler/src/dotty/tools/dotc/printing/Printer.scala index 697ab063a646..a7ee47838194 100644 --- a/compiler/src/dotty/tools/dotc/printing/Printer.scala +++ b/compiler/src/dotty/tools/dotc/printing/Printer.scala @@ -15,7 +15,7 @@ import scala.annotation.internal.sharable /** The base class of all printers */ -abstract class Printer { +abstract class Printer: private var prec: Precedence = GlobalPrec @@ -37,12 +37,11 @@ abstract class Printer { * If the presence of the parentheses depends on precedence, inserting them manually is most certainly a bug. * Use `changePrec` instead to generate them exactly when needed. */ - def atPrec(prec: Precedence)(op: => Text): Text = { + def atPrec(prec: Precedence)(op: => Text): Text = val outerPrec = this.prec this.prec = prec try op finally this.prec = outerPrec - } /** Generate text using `op`, assuming a given precedence level `prec`. * If new level `prec` is lower than previous level, put text in parentheses. @@ -190,11 +189,9 @@ abstract class Printer { /** The context in which this printer operates */ def printerContext: Context -} -object Printer { +object Printer: /** Debug hook; set to true if you want to see unique ids but cannot run with option * -uniqid. A typical use case is for further exploration after a -Ytest-pickler failure. 
*/ @sharable var debugPrintUnique: Boolean = false -} diff --git a/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala b/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala index 51aaa0932e5e..b65a1d9989a9 100644 --- a/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala +++ b/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala @@ -31,7 +31,7 @@ import dotty.tools.dotc.util.SourcePosition import dotty.tools.dotc.ast.untpd.{MemberDef, Modifiers, PackageDef, RefTree, Template, TypeDef, ValOrDefDef} import cc.{CaptureSet, toCaptureSet, IllegalCaptureRef} -class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { +class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx): /** A stack of enclosing DefDef, TypeDef, or ClassDef, or ModuleDefs nodes */ private var enclosingDef: untpd.Tree = untpd.EmptyTree @@ -41,46 +41,40 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { override def printerContext: Context = myCtx - def withEnclosingDef(enclDef: Tree[?])(op: => Text): Text = { + def withEnclosingDef(enclDef: Tree[?])(op: => Text): Text = val savedCtx = myCtx if (enclDef.hasType && enclDef.symbol.exists) myCtx = ctx.withOwner(enclDef.symbol) val savedDef = enclosingDef enclosingDef = enclDef - try op finally { + try op finally myCtx = savedCtx enclosingDef = savedDef - } - } - def inPattern(op: => Text): Text = { + def inPattern(op: => Text): Text = val savedCtx = myCtx myCtx = ctx.addMode(Mode.Pattern) try op finally myCtx = savedCtx - } inline def inContextBracket(inline op: Text): Text = val savedCtx = myCtx try op finally myCtx = savedCtx - def withoutPos(op: => Text): Text = { + def withoutPos(op: => Text): Text = val savedPrintPos = printPos printPos = false try op finally printPos = savedPrintPos - } - protected def enclDefIsClass: Boolean = enclosingDef match { + protected def enclDefIsClass: Boolean = enclosingDef match case owner: TypeDef => owner.isClassDef case owner: untpd.ModuleDef => true case _ 
=> false - } - protected def PrintableFlags(isType: Boolean): FlagSet = { + protected def PrintableFlags(isType: Boolean): FlagSet = val fs = if (isType) TypeSourceModifierFlags | Module | Local // DOTTY problem: cannot merge these two statements else TermSourceModifierFlags | Module | Local fs.toCommonFlags - } override def nameString(name: Name): String = def strippedName = if printDebug then name else name.stripModuleClassSuffix @@ -95,29 +89,26 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { if !sym.exists || isEmptyPrefix(sym.effectiveOwner) then nameString(sym) else super.fullNameString(sym) - override protected def fullNameOwner(sym: Symbol): Symbol = { + override protected def fullNameOwner(sym: Symbol): Symbol = val owner = super.fullNameOwner(sym) if (owner.is(ModuleClass)) owner.sourceModule else owner - } - override def toTextRef(tp: SingletonType): Text = controlled { - tp match { + override def toTextRef(tp: SingletonType): Text = controlled: + tp match case tp: ThisType if !printDebug => if (tp.cls.isAnonymousClass) keywordStr("this") if (tp.cls.is(ModuleClass)) fullNameString(tp.cls.sourceModule) else super.toTextRef(tp) case _ => super.toTextRef(tp) - } - } - override def toTextPrefix(tp: Type): Text = controlled { + override def toTextPrefix(tp: Type): Text = controlled: def isOmittable(sym: Symbol) = if printDebug then false else if homogenizedView then isEmptyPrefix(sym) // drop and anonymous classes, but not scala, Predef. 
else if sym.isPackageObject then isOmittablePrefix(sym.owner) else isOmittablePrefix(sym) - tp match { + tp match case tp: ThisType if isOmittable(tp.cls) => "" case tp @ TermRef(pre, _) => @@ -126,8 +117,6 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { else if (isOmittable(sym)) "" else super.toTextPrefix(tp) case _ => super.toTextPrefix(tp) - } - } override protected def toTextParents(parents: List[Type]): Text = Text(parents.map(toTextLocal).map(typeText), keywordStr(" with ")) @@ -135,11 +124,10 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { override protected def refinementNameString(tp: RefinedType): String = if (tp.parent.isInstanceOf[WildcardType] || tp.refinedName == nme.WILDCARD) super.refinementNameString(tp) - else { + else val tsym = tp.parent.member(tp.refinedName).symbol if (!tsym.exists) super.refinementNameString(tp) else simpleNameString(tsym) - } private def arrow(isGiven: Boolean, isPure: Boolean): String = (if isGiven then "?" 
else "") + (if isPure then "->" else "=>") @@ -151,7 +139,7 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { val capturesRoot = refs == rootSetText val isPure = Feature.pureFunsEnabled && !tsym.name.isImpureFunction && !capturesRoot - changePrec(GlobalPrec) { + changePrec(GlobalPrec): val argStr: Text = if args.length == 2 && !defn.isTupleNType(args.head) @@ -166,12 +154,11 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { ~ " " ~ arrow(isGiven, isPure) ~ (refs provided !capturesRoot) ~ " " ~ argText(args.last) - } private def toTextMethodAsFunction(info: Type, isPure: Boolean, refs: Text = Str("")): Text = info match case info: MethodType => val capturesRoot = refs == rootSetText - changePrec(GlobalPrec) { + changePrec(GlobalPrec): "(" ~ paramsText(info) ~ ") " @@ -179,18 +166,16 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { ~ (refs provided !capturesRoot) ~ " " ~ toTextMethodAsFunction(info.resultType, isPure) - } case info: PolyType => - changePrec(GlobalPrec) { + changePrec(GlobalPrec): "[" ~ paramsText(info) ~ "] => " ~ toTextMethodAsFunction(info.resultType, isPure) - } case _ => toText(info) - override def toText(tp: Type): Text = controlled { + override def toText(tp: Type): Text = controlled: def toTextTuple(args: List[Type]): Text = "(" ~ argsText(args) ~ ")" @@ -207,24 +192,21 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { case _ => false def tyconName(tp: Type): Name = tp.typeSymbol.name - def checkAssocMismatch(tp: Type, isRightAssoc: Boolean) = tp match { + def checkAssocMismatch(tp: Type, isRightAssoc: Boolean) = tp match case AppliedType(tycon, _) => isInfixType(tp) && tyconName(tycon).endsWith(":") != isRightAssoc case AndType(_, _) => isRightAssoc case OrType(_, _) => isRightAssoc case _ => false - } - def toTextInfixType(opName: Name, l: Type, r: Type)(op: => Text): Text = { + def toTextInfixType(opName: Name, l: Type, r: Type)(op: => Text): Text = val isRightAssoc = 
opName.endsWith(":") val opPrec = parsing.precedence(opName) - changePrec(opPrec) { + changePrec(opPrec): val leftPrec = if (isRightAssoc || checkAssocMismatch(l, isRightAssoc)) opPrec + 1 else opPrec val rightPrec = if (!isRightAssoc || checkAssocMismatch(r, isRightAssoc)) opPrec + 1 else opPrec atPrec(leftPrec) { argText(l) } ~ " " ~ op ~ " " ~ atPrec(rightPrec) { argText(r) } - } - } def appliedText(tp: Type): Text = tp match case tp @ AppliedType(tycon, args) => @@ -243,7 +225,7 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { case _ => Str("") - homogenize(tp) match { + homogenize(tp) match case tp: AppliedType => val refined = appliedText(tp) if refined.isEmpty then super.toText(tp) else refined @@ -252,9 +234,8 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { case AndType(tp1, tp2) => toTextInfixType(tpnme.raw.AMP, tp1, tp2) { toText(tpnme.raw.AMP) } case tp @ OrType(tp1, tp2) => - toTextInfixType(tpnme.raw.BAR, tp1, tp2) { + toTextInfixType(tpnme.raw.BAR, tp1, tp2): if tp.isSoft && printDebug then toText(tpnme.ZOR) else toText(tpnme.raw.BAR) - } case tp @ EtaExpansion(tycon) if !printDebug && appliedText(tp.asInstanceOf[HKLambda].resType).isEmpty => // don't eta contract if the application would be printed specially @@ -266,11 +247,10 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { if (tp.symbol.isAnonymousClass && !showUniqueIds) toText(tp.info) else if (tp.symbol.is(Param)) - tp.prefix match { + tp.prefix match case pre: ThisType if pre.cls == tp.symbol.owner => nameString(tp.symbol) case _ => super.toText(tp) - } else super.toText(tp) case ErasedValueType(tycon, underlying) => "ErasedValueType(" ~ toText(tycon) ~ ", " ~ toText(underlying) ~ ")" @@ -306,8 +286,6 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { "[applied to [" ~ toTextGlobal(targs, ", ") ~ "] returning " ~ toText(resType) case _ => super.toText(tp) - } - } protected def exprToText(tp: ExprType): Text = "=> " ~ 
toText(tp.resType) @@ -320,69 +298,59 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { blockText(block.stats :+ block.expr) protected def blockText[T <: Untyped](trees: List[Tree[T]]): Text = - inContextBracket { + inContextBracket: ("{" ~ toText(trees, "\n") ~ "}").close - } - protected def typeApplyText[T <: Untyped](tree: TypeApply[T]): Text = { + protected def typeApplyText[T <: Untyped](tree: TypeApply[T]): Text = val funText = toTextLocal(tree.fun) - tree.fun match { + tree.fun match case Select(New(tpt), nme.CONSTRUCTOR) if tpt.typeOpt.dealias.isInstanceOf[AppliedType] => funText // type was already printed by toText(new) case _ => funText ~ "[" ~ toTextGlobal(tree.args, ", ") ~ "]" - } - } - protected def toTextCore[T <: Untyped](tree: Tree[T]): Text = { + protected def toTextCore[T <: Untyped](tree: Tree[T]): Text = import untpd._ - def isLocalThis(tree: Tree) = tree.typeOpt match { + def isLocalThis(tree: Tree) = tree.typeOpt match case tp: ThisType => tp.cls == ctx.owner.enclosingClass case _ => false - } def optDotPrefix(tree: This) = optText(tree.qual)(_ ~ ".") provided !isLocalThis(tree) - def caseBlockText(tree: Tree): Text = tree match { + def caseBlockText(tree: Tree): Text = tree match case Block(stats, expr) => toText(stats :+ expr, "\n") case expr => toText(expr) - } // Dotty deviation: called with an untpd.Tree, so cannot be a untpd.Tree[T] (seems to be a Scala2 problem to allow this) // More deviations marked below as // DD - def enumText(tree: untpd.Tree) = tree match { // DD + def enumText(tree: untpd.Tree) = tree match // DD case _: untpd.GenFrom | _: untpd.GenAlias => toText(tree) case _ => keywordStr("if ") ~ toText(tree) - } def forText(enums: List[untpd.Tree], expr: untpd.Tree, sep: String): Text = // DD changePrec(GlobalPrec) { keywordStr("for ") ~ Text(enums map enumText, "; ") ~ sep ~ toText(expr) } - def cxBoundToText(bound: untpd.Tree): Text = bound match { // DD + def cxBoundToText(bound: untpd.Tree): Text = 
bound match // DD case AppliedTypeTree(tpt, _) => " : " ~ toText(tpt) case untpd.Function(_, tpt) => " <% " ~ toText(tpt) - } def varianceText(mods: untpd.Modifiers) = if (mods.is(Covariant)) "+" else if (mods.is(Contravariant)) "-" else "" - def argText(arg: Tree): Text = arg match { + def argText(arg: Tree): Text = arg match case arg: TypeBoundsTree => "?" ~ toTextGlobal(arg) case arg: TypeTree => - arg.typeOpt match { + arg.typeOpt match case tp: TypeBounds => "?" ~ toTextGlobal(arg) case _ => toTextGlobal(arg) - } case _ => toTextGlobal(arg) - } - def dropBlock(tree: Tree): Tree = tree match { + def dropBlock(tree: Tree): Tree = tree match case Block(Nil, expr) => expr case _ => tree - } def importText(expr: Tree, selectors: List[untpd.ImportSelector]) = @@ -409,23 +377,21 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { toTextLocal(expr) ~ "." ~ selectorsText - tree match { + tree match case id: Trees.SearchFailureIdent[?] => - tree.typeOpt match { + tree.typeOpt match case reason: Implicits.SearchFailureType => Str(id.explanation) ~ ("summon[" ~ toText(reason.clarify(reason.expectedType)) ~ "]").close case _ => Str(id.explanation) - } case id @ Ident(name) => - val txt = tree.typeOpt match { + val txt = tree.typeOpt match case tp: NamedType if name != nme.WILDCARD => val pre = if (tp.symbol.is(JavaStatic)) tp.prefix.widen else tp.prefix toTextPrefix(pre) ~ withPos(selectionString(tp), tree.sourcePos) case _ => toText(name) - } if (isBackquoted(tree) && !homogenizedView) "`" ~ toText(name) ~ "`" else if (name.isTypeName) typeText(txt) else txt @@ -438,9 +404,8 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { optDotPrefix(qual) ~ keywordStr("super") ~ optText(mix)("[" ~ _ ~ "]") case app @ Apply(fun, args) => if (fun.hasType && fun.symbol == defn.throwMethod) - changePrec (GlobalPrec) { + changePrec (GlobalPrec): keywordStr("throw ") ~ toText(args.head) - } else toTextLocal(fun) ~ "(" @@ -450,19 +415,16 @@ class 
RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { case tree: TypeApply => typeApplyText(tree) case Literal(c) => - tree.typeOpt match { + tree.typeOpt match case ConstantType(tc) => withPos(toText(tc), tree.sourcePos) case _ => withPos(toText(c), tree.sourcePos) - } case New(tpt) => - keywordStr("new ") ~ { - tpt match { + keywordStr("new ") `~`: + tpt match case tpt: Template => toTextTemplate(tpt, ofNew = true) case _ => toTextLocal(tpt) - } - } case Typed(expr, tpt) => - changePrec(InfixPrec) { + changePrec(InfixPrec): if isWildcardStarArg(tree) then expr match case Ident(nme.WILDCARD_STAR) => @@ -476,7 +438,6 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { val line = exprText.lastLine val colon = if !line.isEmpty && isOperatorPart(line.last) then " :" else ":" exprText ~ colon ~ toText(tpt) - } case NamedArg(name, arg) => toText(name) ~ " = " ~ toText(arg) case Assign(lhs, rhs) => @@ -485,25 +446,23 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { blockToText(block) case If(cond, thenp, elsep) => val isInline = tree.isInstanceOf[Trees.InlineIf[?]] - changePrec(GlobalPrec) { + changePrec(GlobalPrec): keywordStr(if (isInline) "inline if " else "if ") ~ toText(cond) ~ (keywordText(" then") provided !cond.isInstanceOf[Parens]) ~~ toText(thenp) ~ optText(elsep)(keywordStr(" else ") ~ _) - } case Closure(env, ref, target) => "closure(" ~ (toTextGlobal(env, ", ") ~ " | " provided env.nonEmpty) ~ toTextGlobal(ref) ~ (":" ~ toText(target) provided !target.isEmpty) ~ ")" case Match(sel, cases) => val isInline = tree.isInstanceOf[Trees.InlineMatch[?]] if (sel.isEmpty && !isInline) blockText(cases) - else changePrec(GlobalPrec) { + else changePrec(GlobalPrec): val selTxt: Text = if (isInline) if (sel.isEmpty) keywordStr("implicit") else keywordStr("inline ") ~ toText(sel) else toText(sel) selTxt ~ keywordStr(" match ") ~ blockText(cases) - } case CaseDef(pat, guard, body) => keywordStr("case ") ~ inPattern(toText(pat)) ~ 
optText(guard)(keywordStr(" if ") ~ _) ~ " => " ~ caseBlockText(body) case Labeled(bind, expr) => @@ -517,13 +476,11 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { case WhileDo(cond, body) => changePrec(GlobalPrec) { keywordStr("while ") ~ toText(cond) ~ keywordStr(" do ") ~ toText(body) } case Try(expr, cases, finalizer) => - changePrec(GlobalPrec) { + changePrec(GlobalPrec): keywordStr("try ") ~ toText(expr) ~ optText(cases)(keywordStr(" catch ") ~ _) ~ optText(finalizer)(keywordStr(" finally ") ~ _) - } case Throw(expr) => - changePrec(GlobalPrec) { + changePrec(GlobalPrec): keywordStr("throw ") ~ toText(expr) - } case SeqLiteral(elems, elemtpt) => "[" ~ toTextGlobal(elems, ",") ~ " : " ~ toText(elemtpt) ~ "]" case tree @ Inlined(call, bindings, body) => @@ -554,18 +511,15 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { case _ => toTextLocal(tpt) ~ "[" ~ Text(args.map(argText), ", ") ~ "]" case LambdaTypeTree(tparams, body) => - changePrec(GlobalPrec) { + changePrec(GlobalPrec): paramsText(tparams) ~ " =>> " ~ toText(body) - } case TermLambdaTypeTree(params, body) => - changePrec(GlobalPrec) { + changePrec(GlobalPrec): paramsText(params) ~ " =>> " ~ toText(body) - } case MatchTypeTree(bound, sel, cases) => - changePrec(GlobalPrec) { + changePrec(GlobalPrec): toText(sel) ~ keywordStr(" match ") ~ blockText(cases) ~ (" <: " ~ toText(bound) provided !bound.isEmpty) - } case ImpureByNameTypeTree(tpt) => "=> " ~ toTextLocal(tpt) case ByNameTypeTree(tpt) => @@ -579,10 +533,9 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { case Alternative(trees) => changePrec(OrPrec) { toText(trees, " | ") } case UnApply(fun, implicits, patterns) => - val extractor = fun match { + val extractor = fun match case Select(extractor, name) if name.isUnapplyName => extractor case _ => fun - } toTextLocal(extractor) ~ "(" ~ toTextGlobal(patterns, ", ") ~ ")" ~ ("(" ~ toTextGlobal(implicits, ", ") ~ ")" provided implicits.nonEmpty) 
@@ -592,12 +545,11 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { defDefToText(tree) case tree @ TypeDef(name, rhs) => def typeDefText(tparamsText: => Text, rhsText: => Text) = - dclTextOr(tree) { + dclTextOr(tree): modText(tree.mods, tree.symbol, keywordStr("type"), isType = true) ~~ (varianceText(tree.mods) ~ typeText(nameIdText(tree))) ~ withEnclosingDef(tree) { tparamsText ~ rhsText } - } - def recur(rhs: Tree, tparamsTxt: => Text, printMemberArgs: Boolean): Text = rhs match { + def recur(rhs: Tree, tparamsTxt: => Text, printMemberArgs: Boolean): Text = rhs match case impl: Template => templateText(tree, impl) case rhs: TypeBoundsTree => @@ -608,7 +560,6 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { typeDefText(tparamsTxt, toText(rhs)) case rhs => typeDefText(tparamsTxt, optText(rhs)(" = " ~ _)) - } recur(rhs, "", true) case tree @ Import(expr, selectors) => myCtx = myCtx.importContext(tree, tree.symbol) @@ -640,27 +591,24 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { if (printDebug) "[" ~ toText(t) ~ "]#TS#" else toText(t) case tree @ ModuleDef(name, impl) => - withEnclosingDef(tree) { + withEnclosingDef(tree): modText(tree.mods, NoSymbol, keywordStr("object"), isType = false) ~~ nameIdText(tree) ~ toTextTemplate(impl) - } case SymbolLit(str) => "'" + str case InterpolatedString(id, segments) => def strText(str: Literal) = withPos(escapedString(str.const.stringValue), tree.sourcePos) - def segmentText(segment: Tree) = segment match { + def segmentText(segment: Tree) = segment match case Thicket(List(str: Literal, expr)) => strText(str) ~ "{" ~ toTextGlobal(expr) ~ "}" case str: Literal => strText(str) - } toText(id) ~ "\"" ~ Text(segments map segmentText, "") ~ "\"" case fn @ Function(args, body) => var implicitSeen: Boolean = false var isGiven: Boolean = false - val erasedParams = fn match { + val erasedParams = fn match case fn: FunctionWithMods => fn.erasedParams case _ => fn.args.map(_ => 
false) - } - def argToText(arg: Tree, isErased: Boolean) = arg match { + def argToText(arg: Tree, isErased: Boolean) = arg match case arg @ ValDef(name, tpt, _) => val implicitText = if ((arg.mods.is(Given))) { isGiven = true; "" } @@ -670,14 +618,12 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { implicitText ~ erasedText ~ toText(name) ~ optAscription(tpt) case _ => toText(arg) - } - val argsText = args match { + val argsText = args match case (arg @ ValDef(_, tpt, _)) :: Nil if tpt.isEmpty => argToText(arg, erasedParams(0)) case _ => "(" ~ Text(args.zip(erasedParams).map(argToText), ", ") ~ ")" - } val isPure = Feature.pureFunsEnabled && tree.match @@ -686,9 +632,8 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { argsText ~ " " ~ arrow(isGiven, isPure) ~ " " ~ toText(body) case PolyFunction(targs, body) => val targsText = "[" ~ Text(targs.map((arg: Tree) => toText(arg)), ", ") ~ "]" - changePrec(GlobalPrec) { + changePrec(GlobalPrec): targsText ~ " => " ~ toText(body) - } case InfixOp(l, op, r) => val opPrec = parsing.precedence(op.name) changePrec(opPrec) { toText(l) ~ " " ~ toText(op) ~ " " ~ toText(r) } @@ -717,9 +662,8 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { modText(mods, NoSymbol, keywordStr("val"), isType = false) ~~ toText(pats, ", ") ~ optAscription(tpt) ~ optText(rhs)(" = " ~ _) case ParsedTry(expr, handler, finalizer) => - changePrec(GlobalPrec) { + changePrec(GlobalPrec): keywordStr("try ") ~ toText(expr) ~ " " ~ keywordStr("catch") ~ " {" ~ toText(handler) ~ "}" ~ optText(finalizer)(keywordStr(" finally ") ~ _) - } case Number(digits, kind) => digits case Thicket(trees) => @@ -737,10 +681,9 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { keywordStr("$") ~ spliceTypeText ~ keywordStr("{") ~ toTextGlobal(expr) ~ keywordStr("}") case SplicePattern(pattern, args) => val spliceTypeText = (keywordStr("[") ~ toTextGlobal(tree.typeOpt) ~ 
keywordStr("]")).provided(printDebug && tree.typeOpt.exists) - keywordStr("$") ~ spliceTypeText ~ { + keywordStr("$") ~ spliceTypeText `~`: if args.isEmpty then keywordStr("{") ~ inPattern(toText(pattern)) ~ keywordStr("}") else toText(pattern.symbol.name) ~ "(" ~ toTextGlobal(args, ", ") ~ ")" - } case Hole(isTerm, idx, args, content) => val (prefix, postfix) = if isTerm then ("{{{", "}}}") else ("[[[", "]]]") val argsText = toTextGlobal(args, ", ") @@ -751,8 +694,6 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { changePrec(GlobalPrec)("^{" ~ Text(refs.map(toText), ", ") ~ "}" ~ toText(parent)) case _ => tree.fallbackToText(this) - } - } override protected def toTextCapturing(tp: Type, refsText: Text, boxText: Text): Text = tp match case tp: AppliedType if defn.isFunctionSymbol(tp.typeSymbol) && !printDebug => @@ -762,7 +703,7 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { case _ => super.toTextCapturing(tp, refsText, boxText) - override def toText[T <: Untyped](tree: Tree[T]): Text = controlled { + override def toText[T <: Untyped](tree: Tree[T]): Text = controlled: import untpd._ var txt = toTextCore(tree) @@ -776,23 +717,20 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { // But then pickling and unpickling the original trees will yield trees // with the original types before they are rewritten, which causes a discrepancy. - def suppressPositions = tree match { + def suppressPositions = tree match case _: WithoutTypeOrPos[?] | _: TypeTree => true // TypeTrees never have an interesting position case _ => false - } - if (ctx.settings.XprintTypes.value && tree.hasType) { + if (ctx.settings.XprintTypes.value && tree.hasType) // add type to term nodes; replace type nodes with their types unless -Yprint-pos is also set. 
- val tp1 = tree.typeOpt match { + val tp1 = tree.typeOpt match case tp: TermRef if tree.isInstanceOf[RefTree] && !tp.denot.isOverloaded => tp.underlying case tp => tp - } - val tp2 = { + val tp2 = val tp = tp1.tryNormalize if (tp != NoType) tp else tp1 - } val tp3 = - if (homogenizedView && tree.isInstanceOf[If | Match | Annotated | Block | CaseDef]) { + if (homogenizedView && tree.isInstanceOf[If | Match | Annotated | Block | CaseDef]) // Types of non-leaf trees are not pickled but reconstructed when // unpickled using the TypeAssigner. Sometimes, this requires choosing // arbitrarily between two =:= types (e.g., when typing an `if`, where @@ -803,20 +741,17 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { // any alias in these types to make -Ytest-pickler work (the list of // types in the isInstanceOf check above is conservative and might // need to be expanded). - val dealiasMap = new TypeMap { + val dealiasMap = new TypeMap: def apply(tp: Type) = mapOver(tp.dealias) - } dealiasMap(tp2) - } else tp2 if (!suppressTypes) txt = ("<" ~ txt ~ ":" ~ toText(tp3) ~ ">").close else if (tree.isType && !homogenizedView) txt = toText(tp3) - } - if (!suppressPositions) { - if (printPos) { + if (!suppressPositions) + if (printPos) val posStr = if (homogenizedView || debugPos) if (tree.isInstanceOf[MemberDef]) Str(s"${tree.source}${tree.span}") @@ -825,18 +760,14 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { "<" ~ toText(tree.sourcePos) ~ ">" val clsStr = ""//if (tree.isType) tree.getClass.toString else "" txt = (txt ~ "@" ~ posStr ~ clsStr).close - } if (ctx.settings.YprintPosSyms.value && tree.isDef) txt = (txt ~ s"@@(${tree.symbol.name}=" ~ tree.symbol.sourcePos.toString ~ ")").close - } if (ctx.settings.YshowTreeIds.value) txt = (txt ~ "#" ~ tree.uniqueId.toString).close - tree match { + tree match case Block(_, _) | Template(_, _, _, _) => txt case _ => txt.close - } - } /** Print modifiers from symbols if tree has type, 
overriding the behavior in Trees. */ extension (mdef: untpd.DefTree) def mods: untpd.Modifiers = @@ -857,33 +788,30 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { case tp: NamedType if !tp.symbol.isStatic => s"%${tp.symbol.nestingLevel}" case tp: TypeVar => s"%${tp.nestingLevel}" case tp: TypeParamRef => ctx.typerState.constraint.typeVarOfParam(tp) match - case tvar: TypeVar => s"%${tvar.nestingLevel}" - case _ => "" + case tvar: TypeVar => s"%${tvar.nestingLevel}" + case _ => "" case _ => "" - else "") + else "") private def useSymbol(tree: untpd.Tree) = tree.hasType && tree.symbol.exists && ctx.settings.YprintSyms.value protected def nameIdText[T <: Untyped](tree: NameTree[T]): Text = - if (tree.hasType && tree.symbol.exists) { + if (tree.hasType && tree.symbol.exists) val str = nameString(tree.symbol) - tree match { + tree match case tree: RefTree => withPos(str, tree.sourcePos) case tree: MemberDef => withPos(str, tree.sourcePos.withSpan(tree.nameSpan)) case _ => str - } - } else toText(tree.name) ~ idText(tree) private def toTextOwner(tree: Tree[?]) = "[owner = " ~ tree.symbol.maybeOwner.show ~ "]" provided ctx.settings.YprintDebugOwners.value protected def dclTextOr[T <: Untyped](tree: Tree[T])(treeText: => Text): Text = - toTextOwner(tree) ~ { + toTextOwner(tree) `~`: if (useSymbol(tree)) annotsText(tree.symbol) ~~ dclText(tree.symbol) else treeText - } def paramsText[T <: Untyped](params: ParamClause[T]): Text = (params: @unchecked) match case Nil => @@ -897,20 +825,18 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { def addParamssText[T <: Untyped](leading: Text, paramss: List[ParamClause[T]]): Text = paramss.foldLeft(leading)((txt, params) => txt ~ paramsText(params)) - protected def valDefToText[T <: Untyped](tree: ValDef[T]): Text = { - dclTextOr(tree) { + protected def valDefToText[T <: Untyped](tree: ValDef[T]): Text = + dclTextOr(tree): modText(tree.mods, tree.symbol, keywordStr(if (tree.mods.is(Mutable)) "var" 
else "val"), isType = false) ~~ valDefText(nameIdText(tree)) ~ optAscription(tree.tpt) ~ withEnclosingDef(tree) { optText(tree.rhs)(" = " ~ _) } - } - } - protected def defDefToText[T <: Untyped](tree: DefDef[T]): Text = { + protected def defDefToText[T <: Untyped](tree: DefDef[T]): Text = import untpd._ - dclTextOr(tree) { + dclTextOr(tree): val defKeyword = modText(tree.mods, tree.symbol, keywordStr("def"), isType = false) val isExtension = tree.hasType && tree.symbol.is(ExtensionMethod) - withEnclosingDef(tree) { + withEnclosingDef(tree): val coreSig = if isExtension then val paramss = @@ -952,47 +878,40 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { addParamssText( addParamssText(keywordStr("extension "), leadingParamss) ~~ (defKeyword ~~ valDefText(nameIdText(tree))).close, - trailingParamss) + trailingParamss) else addParamssText(defKeyword ~~ valDefText(nameIdText(tree)), tree.paramss) coreSig ~ optAscription(tree.tpt) ~ optText(tree.rhs)(" = " ~ keywordText("macro ").provided(tree.symbol.isScala2Macro) ~ _) - } - } - } - protected def toTextTemplate(impl: Template, ofNew: Boolean = false): Text = { + protected def toTextTemplate(impl: Template, ofNew: Boolean = false): Text = val Template(constr @ DefDef(_, paramss, _, _), _, self, _) = impl - val tparamsTxt = withEnclosingDef(constr) { + val tparamsTxt = withEnclosingDef(constr): paramsText(constr.leadingTypeParams) provided constr.leadingTypeParams.nonEmpty - } val primaryConstrs = if (constr.rhs.isEmpty) Nil else constr :: Nil val prefix: Text = if (constr.trailingParamss.isEmpty || primaryConstrs.nonEmpty) tparamsTxt - else { + else var modsText = modText(constr.mods, constr.symbol, "", isType = false) if (!modsText.isEmpty) modsText = " " ~ modsText if (constr.mods.hasAnnotations && !constr.mods.hasFlags) modsText = modsText ~~ " this" withEnclosingDef(constr) { addParamssText(tparamsTxt ~~ modsText, constr.trailingParamss) } - } val parentsText = Text(impl.parents.map(constrText), 
if (ofNew) keywordStr(" with ") else ", ") val derivedText = Text(impl.derived.map(toText(_)), ", ") val selfText = { val selfName = if (self.name == nme.WILDCARD) keywordStr("this") else self.name.toString (selfName ~ optText(self.tpt)(": " ~ _) ~ " =>").close }.provided(!self.isEmpty) - val body = if (ctx.settings.YtestPickler.value) { + val body = if (ctx.settings.YtestPickler.value) // Pickling/unpickling reorders the body members, so we need to homogenize - val (params, rest) = impl.body partition { + val (params, rest) = impl.body partition: case stat: TypeDef => stat.symbol.is(Param) case stat: ValOrDefDef => stat.symbol.is(ParamAccessor) && !stat.symbol.isSetter case _ => false - } params ::: rest - } else impl.body val bodyText = inContextBracket( @@ -1002,29 +921,25 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { keywordText(" extends").provided(!ofNew && impl.parents.nonEmpty) ~~ parentsText ~ keywordText(" derives").provided(impl.derived.nonEmpty) ~~ derivedText ~ bodyText - } - protected def templateText(tree: TypeDef, impl: Template): Text = { + protected def templateText(tree: TypeDef, impl: Template): Text = val kw = if tree.mods.is(Trait) then "trait" else "class" val decl = modText(tree.mods, tree.symbol, keywordStr(kw), isType = true) ( decl ~~ typeText(nameIdText(tree)) ~ withEnclosingDef(tree) { toTextTemplate(impl) } // ~ (if (tree.hasType && printDebug) i"[decls = ${tree.symbol.info.decls}]" else "") // uncomment to enable ) - } protected def toTextPackageId[T <: Untyped](pid: Tree[T]): Text = if (homogenizedView && pid.hasType) toTextLocal(pid.typeOpt) else toTextLocal(pid) - protected def packageDefText(tree: PackageDef): Text = { - val statsText = tree.stats match { + protected def packageDefText(tree: PackageDef): Text = + val statsText = tree.stats match case (pdef: PackageDef) :: Nil => toText(pdef) case _ => inContextBracket(toTextGlobal(tree.stats, "\n")) - } val bodyText = if (currentPrecedence == TopLevelPrec) 
"\n" ~ statsText else " {" ~ statsText ~ "}" keywordStr("package ") ~ toTextPackageId(tree.pid) ~ bodyText - } /** Textual representation of an instance creation expression without the leading `new` */ protected def constrText(tree: untpd.Tree): Text = toTextLocal(tree).stripPrefix(keywordStr("new ")) // DD @@ -1043,7 +958,7 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { s"@${if annotSym.exists then annotSym.name.toString else t.show}" recur(tree) - protected def modText(mods: untpd.Modifiers, sym: Symbol, kw: String, isType: Boolean): Text = { // DD + protected def modText(mods: untpd.Modifiers, sym: Symbol, kw: String, isType: Boolean): Text = // DD val suppressKw = if (enclDefIsClass) mods.isAllOf(LocalParam) else mods.is(Param) var flagMask = if (ctx.settings.YdebugFlags.value) AnyFlags @@ -1060,7 +975,6 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { else mods.annotations.filterNot(tree => dropAnnotForModText(tree.symbol)).map(annotText(NoSymbol, _)) Text(annotTexts, " ") ~~ flagsText ~~ (Str(kw) provided !suppressKw) - } override def annotText(annot: Annotation): Text = annotText(annot.symbol, annot.tree) @@ -1079,13 +993,12 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { sym.isType && (sym.isAllOf(ProtectedLocal)) && (sym.allOverriddenSymbols exists (_.is(TypeParam))) - override def toText(sym: Symbol): Text = { + override def toText(sym: Symbol): Text = if (sym.isImport) - sym.infoOrCompleter match { + sym.infoOrCompleter match case info: Namer#Completer => return info.original.show case info: ImportType => return s"import ${info.expr.show}" case _ => - } def name = if (printDebug) nameString(sym) @@ -1105,51 +1018,44 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { else typeText(name) }).close - } /** String representation of symbol's kind. 
*/ - override def kindString(sym: Symbol): String = { + override def kindString(sym: Symbol): String = val flags = sym.flagsUNSAFE if (flags.is(Package)) "package" else if (sym.isPackageObject) "package object" else if (flags.is(Module)) "object" else if (sym.isClassConstructor) "constructor" else super.kindString(sym) - } /** String representation of symbol's definition keyword */ - override protected def keyString(sym: Symbol): String = { + override protected def keyString(sym: Symbol): String = val flags = sym.flagsUNSAFE if (sym.isType && sym.owner.isTerm) "" else if (sym.isPackageObject) "package object" else if (flags.is(Module) && flags.is(Case)) "case object" else if (sym.isClass && flags.is(Case)) "case class" else super.keyString(sym) - } override def toTextFlags(sym: Symbol): Text = if (ctx.settings.YdebugFlags.value) super.toTextFlags(sym) - else { + else var flags = sym.flagsUNSAFE if (flags.is(TypeParam)) flags = flags &~ Protected toTextFlags(sym, flags & PrintableFlags(sym.isType)) - } - override def toText(denot: Denotation): Text = denot match { + override def toText(denot: Denotation): Text = denot match case denot: MultiDenotation => Text(denot.alternatives.map(dclText), " ") case NoDenotation => "NoDenotation" case _ => if (denot.symbol.exists) toText(denot.symbol) else "some " ~ toText(denot.info) - } override def plain: PlainPrinter = new PlainPrinter(_ctx) private def withPos(txt: Text, pos: SourcePosition): Text = if (!printLines || !pos.exists) txt - else txt match { + else txt match case Str(s, _) => Str(s, LineRange(pos.line, pos.endLine)) case _ => txt - } -} diff --git a/compiler/src/dotty/tools/dotc/printing/ReplPrinter.scala b/compiler/src/dotty/tools/dotc/printing/ReplPrinter.scala index ea3afef27fae..09445698eb00 100644 --- a/compiler/src/dotty/tools/dotc/printing/ReplPrinter.scala +++ b/compiler/src/dotty/tools/dotc/printing/ReplPrinter.scala @@ -11,7 +11,7 @@ import dotty.tools.dotc.core.Types._ import 
dotty.tools.dotc.printing.Texts._ -class ReplPrinter(_ctx: Context) extends RefinedPrinter(_ctx) { +class ReplPrinter(_ctx: Context) extends RefinedPrinter(_ctx): val debugPrint = _ctx.settings.YprintDebug.value @@ -33,29 +33,24 @@ class ReplPrinter(_ctx: Context) extends RefinedPrinter(_ctx) { override def dclText(sym: Symbol): Text = if (debugPrint) super.dclText(sym) else ("lazy": Text).provided(sym.is(Lazy)) ~~ - toText(sym) ~ { - if (sym.is(Method)) { - sym.info match { + toText(sym) `~`: + if (sym.is(Method)) + sym.info match case tp: ExprType => ":" ~~ toText(tp.resType) case info => toText(info) - } - } else if (sym.isType && sym.info.isTypeAlias) toText(sym.info) else if (sym.isType || sym.isClass) "" else ":" ~~ toText(sym.info) - } override def toTextSingleton(tp: SingletonType): Text = if (debugPrint) super.toTextSingleton(tp) else - tp match { + tp match case ConstantType(const) => toText(const) case _ => toTextRef(tp) ~ ".type" - } // We don't want the colors coming from RefinedPrinter as the REPL uses its // own syntax coloring mechanism. override def coloredStr(text: String, color: String): String = text override def coloredText(text: Text, color: String): Text = text -} diff --git a/compiler/src/dotty/tools/dotc/printing/Showable.scala b/compiler/src/dotty/tools/dotc/printing/Showable.scala index 4a0e68861a1a..75302cac7983 100644 --- a/compiler/src/dotty/tools/dotc/printing/Showable.scala +++ b/compiler/src/dotty/tools/dotc/printing/Showable.scala @@ -6,7 +6,7 @@ import core._ import Contexts._, Texts._, Decorators._ import config.Config.summarizeDepth -trait Showable extends Any { +trait Showable extends Any: /** The text representation of this showable element. 
* This normally dispatches to a pattern matching @@ -33,4 +33,3 @@ trait Showable extends Any { */ def showSummary(depth: Int = summarizeDepth)(using Context): String = show(using ctx.fresh.setProperty(MessageLimiter, SummarizeMessageLimiter(depth))) -} diff --git a/compiler/src/dotty/tools/dotc/printing/SyntaxHighlighting.scala b/compiler/src/dotty/tools/dotc/printing/SyntaxHighlighting.scala index 53e6b9472f5e..2ce80cbb4840 100644 --- a/compiler/src/dotty/tools/dotc/printing/SyntaxHighlighting.scala +++ b/compiler/src/dotty/tools/dotc/printing/SyntaxHighlighting.scala @@ -16,7 +16,7 @@ import dotty.tools.dotc.util.SourceFile import java.util.Arrays /** This object provides functions for syntax highlighting in the REPL */ -object SyntaxHighlighting { +object SyntaxHighlighting: /** if true, log erroneous positions being highlighted */ private inline val debug = true @@ -31,10 +31,10 @@ object SyntaxHighlighting { val TypeColor: String = Console.MAGENTA val AnnotationColor: String = Console.MAGENTA - def highlight(in: String)(using Context): String = { + def highlight(in: String)(using Context): String = def freshCtx = ctx.fresh.setReporter(Reporter.NoReporter) if (in.isEmpty || ctx.settings.color.value == "never") in - else { + else val source = SourceFile.virtual("", in) given Context = freshCtx @@ -46,15 +46,14 @@ object SyntaxHighlighting { Arrays.fill(colorAt.asInstanceOf[Array[AnyRef]], from, to, color) def highlightPosition(span: Span, color: String) = if (span.exists) - if (span.start < 0 || span.end > in.length) { + if (span.start < 0 || span.end > in.length) if (debug) println(s"Trying to highlight erroneous position $span. Input size: ${in.length}") - } else highlightRange(span.start, span.end, color) val scanner = new Scanner(source) - while (scanner.token != EOF) { + while (scanner.token != EOF) val start = scanner.offset val token = scanner.token val name = scanner.name @@ -64,7 +63,7 @@ object SyntaxHighlighting { // Branch order is important. 
For example, // `true` is at the same time a keyword and a literal - token match { + token match case _ if literalTokens.contains(token) => highlightRange(start, end, LiteralColor) @@ -80,20 +79,17 @@ object SyntaxHighlighting { highlightRange(start, end, Console.RED_B) case _ => - } - } for (span <- scanner.commentSpans) highlightPosition(span, CommentColor) - object TreeHighlighter extends untpd.UntypedTreeTraverser { + object TreeHighlighter extends untpd.UntypedTreeTraverser: import untpd._ - def ignored(tree: NameTree) = { + def ignored(tree: NameTree) = val name = tree.name.toTermName // trees named and have weird positions name == nme.ERROR || name == nme.CONSTRUCTOR - } def highlightAnnotations(tree: MemberDef): Unit = for (annotation <- tree.rawMods.annotations) @@ -102,8 +98,8 @@ object SyntaxHighlighting { def highlight(trees: List[Tree])(using Context): Unit = trees.foreach(traverse) - def traverse(tree: Tree)(using Context): Unit = { - tree match { + def traverse(tree: Tree)(using Context): Unit = + tree match case tree: NameTree if ignored(tree) => () case tree: ValOrDefDef => @@ -119,10 +115,7 @@ object SyntaxHighlighting { case _: TypeTree => highlightPosition(tree.span, TypeColor) case _ => - } traverseChildren(tree) - } - } val parser = new Parser(source) val trees = parser.blockStatSeq() @@ -130,18 +123,14 @@ object SyntaxHighlighting { val highlighted = new StringBuilder() - for (idx <- colorAt.indices) { + for (idx <- colorAt.indices) val prev = if (idx == 0) NoColor else colorAt(idx - 1) val curr = colorAt(idx) if (curr != prev) highlighted.append(curr) highlighted.append(in(idx)) - } if (colorAt.last != NoColor) highlighted.append(NoColor) highlighted.toString - } - } -} diff --git a/compiler/src/dotty/tools/dotc/printing/Texts.scala b/compiler/src/dotty/tools/dotc/printing/Texts.scala index 475e2c6900d5..04726dc4c564 100644 --- a/compiler/src/dotty/tools/dotc/printing/Texts.scala +++ b/compiler/src/dotty/tools/dotc/printing/Texts.scala @@ 
-2,22 +2,21 @@ package dotty.tools.dotc package printing import scala.annotation.internal.sharable -object Texts { +object Texts: @sharable private val ansi = java.util.regex.Pattern.compile("\u001b\\[\\d+m").nn - sealed abstract class Text { + sealed abstract class Text: protected def indentMargin: Int = 2 def relems: List[Text] - def isEmpty: Boolean = this match { + def isEmpty: Boolean = this match case Str(s, _) => s.isEmpty case Fluid(relems) => relems forall (_.isEmpty) case Vertical(relems) => relems.isEmpty - } // Str Ver Clo Flu // isVertical F T F F @@ -31,7 +30,7 @@ object Texts { def close: Text = if isSplittable then Closed(relems) else this - def remaining(width: Int): Int = this match { + def remaining(width: Int): Int = this match case Str(s, _) => width - lengthWithoutAnsi(s) case Fluid(Nil) => @@ -41,25 +40,21 @@ object Texts { if (r < 0) r else Fluid(prevs) remaining r case Vertical(_) => -1 - } - def lastLine: String = this match { + def lastLine: String = this match case Str(s, _) => s case _ => relems.head.lastLine - } - def appendToLastLine(that: Text): Text = that match { + def appendToLastLine(that: Text): Text = that match case Str(s2, lines1) => - this match { + this match case Str(s1, lines2) => Str(s1 + s2, lines1 union lines2) case Fluid(Str(s1, lines2) :: prev) => Fluid(Str(s1 + s2, lines1 union lines2) :: prev) case Fluid(relems) => Fluid(that :: relems) case Vertical(_) => throw new IllegalArgumentException("Unexpected Vertical.appendToLastLine") - } case Fluid(relems) => relems.reverse.foldLeft(this)(_ appendToLastLine _) case Vertical(_) => throw new IllegalArgumentException("Unexpected Text.appendToLastLine(Vertical(...))") - } private def appendIndented(that: Text)(width: Int): Text = Fluid(that.layout(width - indentMargin).indented :: this.relems) @@ -76,22 +71,20 @@ object Texts { private def lengthWithoutAnsi(str: String): Int = ansi.matcher(str).nn.replaceAll("").nn.length - def layout(width: Int): Text = this match { + def 
layout(width: Int): Text = this match case Str(s, _) => this case Fluid(relems) => relems.reverse.foldLeft(Str(""): Text)(_.append(width)(_)) case Vertical(relems) => Vertical(relems map (_ layout width)) - } - def map(f: String => String): Text = this match { + def map(f: String => String): Text = this match case Str(s, lines) => Str(f(s), lines) case Fluid(relems) => Fluid(relems map (_ map f)) case Vertical(relems) => Vertical(relems map (_ map f)) - } - def stripPrefix(pre: String): Text = this match { + def stripPrefix(pre: String): Text = this match case Str(s, _) => if (s.startsWith(pre)) s drop pre.length else s case Fluid(relems) => @@ -102,58 +95,50 @@ object Texts { val elems = relems.reverse val head = elems.head.stripPrefix(pre) if (head eq elems.head) this else Vertical((head :: elems.tail).reverse) - } - private def indented: Text = this match { + private def indented: Text = this match case Str(s, lines) => Str((" " * indentMargin) + s, lines) case Fluid(relems) => Fluid(relems map (_.indented)) case Vertical(relems) => Vertical(relems map (_.indented)) - } - def print(sb: StringBuilder, numberWidth: Int): Unit = this match { + def print(sb: StringBuilder, numberWidth: Int): Unit = this match case Str(s, lines) => - if (numberWidth != 0) { + if (numberWidth != 0) val ln = lines.show - if (ln.nonEmpty) { + if (ln.nonEmpty) val pad = (numberWidth - ln.length - 1) assert(pad >= 0) sb.append(" " * pad) sb.append(ln) sb.append("|") - } - } sb.append(s.replaceAll("[ ]+$", "")) case _ => var follow = false - for (elem <- relems.reverse) { + for (elem <- relems.reverse) if (follow) sb.append(System.lineSeparator) elem.print(sb, numberWidth) follow = true - } - } - def maxLine: Int = this match { + def maxLine: Int = this match case Str(_, lines) => lines.end case _ => relems.foldLeft(-1)((acc, relem) => acc max relem.maxLine) - } - def mkString(width: Int, withLineNumbers: Boolean): String = { + def mkString(width: Int, withLineNumbers: Boolean): String = 
val sb = new StringBuilder val numberWidth = if (withLineNumbers) (2 * maxLine.toString.length) + 2 else 0 layout(width - numberWidth).print(sb, numberWidth) sb.toString - } def ~ (that: Text): Text = if (this.isEmpty) that else if (that.isEmpty) this else this match case Fluid(relems1) if !isClosed => that match - case Fluid(relems2) if !that.isClosed => Fluid(relems2 ++ relems1) - case _ => Fluid(that +: relems1) + case Fluid(relems2) if !that.isClosed => Fluid(relems2 ++ relems1) + case _ => Fluid(that +: relems1) case _ => that match - case Fluid(relems2) if !that.isClosed => Fluid(relems2 :+ this) - case _ => Fluid(that :: this :: Nil) + case Fluid(relems2) if !that.isClosed => Fluid(relems2 :+ this) + case _ => Fluid(that :: this :: Nil) def ~~ (that: Text): Text = if (this.isEmpty) that @@ -163,9 +148,8 @@ object Texts { def over (that: Text): Vertical = if (this.isVertical) Vertical(that :: this.relems) else Vertical(that :: this :: Nil) - } - object Text { + object Text: /** The empty text */ def apply(): Text = Str("") @@ -175,11 +159,10 @@ object Texts { */ def apply(xs: Iterable[Text], sep: String = " "): Text = if (sep == "\n") lines(xs) - else { + else val ys = xs.filterNot(_.isEmpty) if (ys.isEmpty) Str("") else ys.reduceRight((a, b) => (a ~ sep).close ~ b) - } /** The given texts `xs`, each on a separate line */ def lines(xs: Iterable[Text]): Vertical = Vertical(xs.toList.reverse) @@ -187,14 +170,12 @@ object Texts { extension (text: => Text) def provided(cond: Boolean): Text = if (cond) text else Str("") - } - case class Str(s: String, lineRange: LineRange = EmptyLineRange) extends Text { + case class Str(s: String, lineRange: LineRange = EmptyLineRange) extends Text: override def relems: List[Text] = List(this) override def toString = this match case Str(s, EmptyLineRange) => s"Str($s)" case Str(s, lineRange) => s"Str($s, $lineRange)" - } case class Vertical(relems: List[Text]) extends Text case class Fluid(relems: List[Text]) extends Text @@ 
-205,13 +186,11 @@ object Texts { implicit def stringToText(s: String): Text = Str(s) /** Inclusive line range */ - case class LineRange(start: Int, end: Int) { + case class LineRange(start: Int, end: Int): def union(that: LineRange): LineRange = LineRange(start min that.start, end max that.end) def show: String = if (start == end) (start + 1).toString else if (start < end) s"${start + 1}-${end + 1}" else "" // empty range - } object EmptyLineRange extends LineRange(Int.MaxValue, Int.MinValue) -} diff --git a/compiler/src/dotty/tools/dotc/printing/package.scala b/compiler/src/dotty/tools/dotc/printing/package.scala index b1d091ca45c1..9c1e8961c1d5 100644 --- a/compiler/src/dotty/tools/dotc/printing/package.scala +++ b/compiler/src/dotty/tools/dotc/printing/package.scala @@ -3,7 +3,7 @@ package dotty.tools.dotc import core.StdNames.{nme,tpnme} import util.Property.Key -package object printing { +package object printing: type Precedence = Int @@ -26,4 +26,3 @@ package object printing { private[printing] def intersperse(a: A): List[A] = nel.flatMap(a :: _ :: Nil).tail -} diff --git a/compiler/src/dotty/tools/dotc/profile/AsyncHelper.scala b/compiler/src/dotty/tools/dotc/profile/AsyncHelper.scala index 61bee4d9f32a..de2b3e6e5e83 100644 --- a/compiler/src/dotty/tools/dotc/profile/AsyncHelper.scala +++ b/compiler/src/dotty/tools/dotc/profile/AsyncHelper.scala @@ -9,24 +9,22 @@ import java.util.concurrent.atomic.AtomicInteger import dotty.tools.dotc.core.Phases.Phase import dotty.tools.dotc.core.Contexts._ -sealed trait AsyncHelper { +sealed trait AsyncHelper: def newUnboundedQueueFixedThreadPool (nThreads: Int, - shortId: String, priority : Int = Thread.NORM_PRIORITY) : ThreadPoolExecutor + shortId: String, priority : Int = Thread.NORM_PRIORITY) : ThreadPoolExecutor def newBoundedQueueFixedThreadPool (nThreads: Int, maxQueueSize: Int, rejectHandler: RejectedExecutionHandler, - shortId: String, priority : Int = Thread.NORM_PRIORITY) : ThreadPoolExecutor -} + shortId: 
String, priority : Int = Thread.NORM_PRIORITY) : ThreadPoolExecutor -object AsyncHelper { - def apply(phase: Phase)(using Context): AsyncHelper = ctx.profiler match { +object AsyncHelper: + def apply(phase: Phase)(using Context): AsyncHelper = ctx.profiler match case NoOpProfiler => new BasicAsyncHelper(phase) case r: RealProfiler => new ProfilingAsyncHelper(phase, r) - } - private abstract class BaseAsyncHelper(phase: Phase)(using Context) extends AsyncHelper { + private abstract class BaseAsyncHelper(phase: Phase)(using Context) extends AsyncHelper: val baseGroup = new ThreadGroup(s"dotc-${phase.phaseName}") private def childGroup(name: String) = new ThreadGroup(baseGroup, name) @@ -34,69 +32,59 @@ object AsyncHelper { protected class CommonThreadFactory(shortId: String, daemon: Boolean = true, - priority: Int) extends ThreadFactory { + priority: Int) extends ThreadFactory: private val group: ThreadGroup = childGroup(shortId) private val threadNumber: AtomicInteger = new AtomicInteger(1) private val namePrefix = s"${baseGroup.getName}-$shortId-" - override def newThread(r: Runnable): Thread = { + override def newThread(r: Runnable): Thread = val wrapped = wrapRunnable(r, shortId) val t: Thread = new Thread(group, wrapped, namePrefix + threadNumber.getAndIncrement, 0) if (t.isDaemon != daemon) t.setDaemon(daemon) if (t.getPriority != priority) t.setPriority(priority) t - } - } - } - private final class BasicAsyncHelper(phase: Phase)(using Context) extends BaseAsyncHelper(phase) { + private final class BasicAsyncHelper(phase: Phase)(using Context) extends BaseAsyncHelper(phase): - override def newUnboundedQueueFixedThreadPool(nThreads: Int, shortId: String, priority: Int): ThreadPoolExecutor = { + override def newUnboundedQueueFixedThreadPool(nThreads: Int, shortId: String, priority: Int): ThreadPoolExecutor = val threadFactory = new CommonThreadFactory(shortId, priority = priority) //like Executors.newFixedThreadPool new ThreadPoolExecutor(nThreads, nThreads, 0L, 
TimeUnit.MILLISECONDS, new LinkedBlockingQueue[Runnable], threadFactory) - } - override def newBoundedQueueFixedThreadPool(nThreads: Int, maxQueueSize: Int, rejectHandler: RejectedExecutionHandler, shortId: String, priority: Int): ThreadPoolExecutor = { + override def newBoundedQueueFixedThreadPool(nThreads: Int, maxQueueSize: Int, rejectHandler: RejectedExecutionHandler, shortId: String, priority: Int): ThreadPoolExecutor = val threadFactory = new CommonThreadFactory(shortId, priority = priority) //like Executors.newFixedThreadPool new ThreadPoolExecutor(nThreads, nThreads, 0L, TimeUnit.MILLISECONDS, new ArrayBlockingQueue[Runnable](maxQueueSize), threadFactory, rejectHandler) - } override protected def wrapRunnable(r: Runnable, shortId:String): Runnable = r - } - private class ProfilingAsyncHelper(phase: Phase, private val profiler: RealProfiler)(using Context) extends BaseAsyncHelper(phase) { + private class ProfilingAsyncHelper(phase: Phase, private val profiler: RealProfiler)(using Context) extends BaseAsyncHelper(phase): - override def newUnboundedQueueFixedThreadPool(nThreads: Int, shortId: String, priority: Int): ThreadPoolExecutor = { + override def newUnboundedQueueFixedThreadPool(nThreads: Int, shortId: String, priority: Int): ThreadPoolExecutor = val threadFactory = new CommonThreadFactory(shortId, priority = priority) //like Executors.newFixedThreadPool new SinglePhaseInstrumentedThreadPoolExecutor(nThreads, nThreads, 0L, TimeUnit.MILLISECONDS, new LinkedBlockingQueue[Runnable], threadFactory, new AbortPolicy) - } - override def newBoundedQueueFixedThreadPool(nThreads: Int, maxQueueSize: Int, rejectHandler: RejectedExecutionHandler, shortId: String, priority: Int): ThreadPoolExecutor = { + override def newBoundedQueueFixedThreadPool(nThreads: Int, maxQueueSize: Int, rejectHandler: RejectedExecutionHandler, shortId: String, priority: Int): ThreadPoolExecutor = val threadFactory = new CommonThreadFactory(shortId, priority = priority) //like 
Executors.newFixedThreadPool new SinglePhaseInstrumentedThreadPoolExecutor(nThreads, nThreads, 0L, TimeUnit.MILLISECONDS, new ArrayBlockingQueue[Runnable](maxQueueSize), threadFactory, rejectHandler) - } - override protected def wrapRunnable(r: Runnable, shortId:String): Runnable = { + override protected def wrapRunnable(r: Runnable, shortId:String): Runnable = () => - val data = new ThreadProfileData - localData.set(data) + val data = new ThreadProfileData + localData.set(data) - val profileStart = profiler.snapThread(0) - try r.run finally { - val snap = profiler.snapThread(data.idleNs) - val threadRange = ProfileRange(profileStart, snap, phase, shortId, data.taskCount, Thread.currentThread()) - profiler.completeBackground(threadRange) - } - } + val profileStart = profiler.snapThread(0) + try r.run finally + val snap = profiler.snapThread(data.idleNs) + val threadRange = ProfileRange(profileStart, snap, phase, shortId, data.taskCount, Thread.currentThread()) + profiler.completeBackground(threadRange) /** * data for thread run. 
Not threadsafe, only written from a single thread */ - final class ThreadProfileData { + final class ThreadProfileData: var firstStartNs = 0L var taskCount = 0 @@ -105,16 +93,15 @@ object AsyncHelper { var lastStartNs = 0L var lastEndNs = 0L - } val localData = new ThreadLocal[ThreadProfileData] private class SinglePhaseInstrumentedThreadPoolExecutor ( corePoolSize: Int, maximumPoolSize: Int, keepAliveTime: Long, unit: TimeUnit, workQueue: BlockingQueue[Runnable], threadFactory: ThreadFactory, handler: RejectedExecutionHandler - ) extends ThreadPoolExecutor(corePoolSize, maximumPoolSize, keepAliveTime, unit, workQueue, threadFactory, handler) { + ) extends ThreadPoolExecutor(corePoolSize, maximumPoolSize, keepAliveTime, unit, workQueue, threadFactory, handler): - override def beforeExecute(t: Thread, r: Runnable): Unit = { + override def beforeExecute(t: Thread, r: Runnable): Unit = val data = localData.get data.taskCount += 1 val now = System.nanoTime() @@ -125,9 +112,8 @@ object AsyncHelper { data.lastStartNs = now super.beforeExecute(t, r) - } - override def afterExecute(r: Runnable, t: Throwable): Unit = { + override def afterExecute(r: Runnable, t: Throwable): Unit = val now = System.nanoTime() val data = localData.get @@ -135,8 +121,4 @@ object AsyncHelper { data.runningNs += now - data.lastStartNs super.afterExecute(r, t) - } - } - } -} diff --git a/compiler/src/dotty/tools/dotc/profile/Profiler.scala b/compiler/src/dotty/tools/dotc/profile/Profiler.scala index 64cc08160701..1b07d5305427 100644 --- a/compiler/src/dotty/tools/dotc/profile/Profiler.scala +++ b/compiler/src/dotty/tools/dotc/profile/Profiler.scala @@ -15,28 +15,25 @@ import dotty.tools.dotc.core.Contexts._ import dotty.tools.io.AbstractFile import annotation.internal.sharable -object Profiler { +object Profiler: def apply()(using Context): Profiler = if (!ctx.settings.YprofileEnabled.value) NoOpProfiler - else { + else val reporter = if (ctx.settings.YprofileDestination.value != "") new 
StreamProfileReporter(new PrintWriter(new FileWriter(ctx.settings.YprofileDestination.value, true))) else ConsoleProfileReporter new RealProfiler(reporter) - } private[profile] val emptySnap: ProfileSnap = ProfileSnap(0, "", 0, 0, 0, 0, 0, 0) -} case class GcEventData(pool:String, reportTimeNs: Long, gcStartMillis:Long, gcEndMillis:Long, name:String, action:String, cause:String, threads:Long) case class ProfileSnap(threadId: Long, threadName: String, snapTimeNanos : Long, idleTimeNanos:Long, cpuTimeNanos: Long, userTimeNanos: Long, - allocatedBytes:Long, heapBytes:Long) { + allocatedBytes:Long, heapBytes:Long): def updateHeap(heapBytes:Long): ProfileSnap = copy(heapBytes = heapBytes) -} -case class ProfileRange(start: ProfileSnap, end:ProfileSnap, phase:Phase, purpose:String, taskCount:Int, thread:Thread) { +case class ProfileRange(start: ProfileSnap, end:ProfileSnap, phase:Phase, purpose:String, taskCount:Int, thread:Thread): def allocatedBytes: Long = end.allocatedBytes - start.allocatedBytes def userNs: Long = end.userTimeNanos - start.userTimeNanos @@ -64,25 +61,22 @@ case class ProfileRange(start: ProfileSnap, end:ProfileSnap, phase:Phase, purpos def allocatedMB: Double = toMegaBytes(end.allocatedBytes - start.allocatedBytes) def retainedHeapMB: Double = toMegaBytes(end.heapBytes - start.heapBytes) -} -sealed trait Profiler { +sealed trait Profiler: def finished(): Unit def beforePhase(phase: Phase): ProfileSnap def afterPhase(phase: Phase, profileBefore: ProfileSnap): Unit -} -private [profile] object NoOpProfiler extends Profiler { +private [profile] object NoOpProfiler extends Profiler: override def beforePhase(phase: Phase): ProfileSnap = Profiler.emptySnap override def afterPhase(phase: Phase, profileBefore: ProfileSnap): Unit = () override def finished(): Unit = () -} -private [profile] object RealProfiler { +private [profile] object RealProfiler: import scala.jdk.CollectionConverters._ val runtimeMx: RuntimeMXBean = ManagementFactory.getRuntimeMXBean 
val memoryMx: MemoryMXBean = ManagementFactory.getMemoryMXBean @@ -92,9 +86,8 @@ private [profile] object RealProfiler { val threadMx: ExtendedThreadMxBean = ExtendedThreadMxBean.proxy if (threadMx.isThreadCpuTimeSupported) threadMx.setThreadCpuTimeEnabled(true) private val idGen = new AtomicInteger() -} -private [profile] class RealProfiler(reporter : ProfileReporter)(using Context) extends Profiler with NotificationListener { +private [profile] class RealProfiler(reporter : ProfileReporter)(using Context) extends Profiler with NotificationListener: def completeBackground(threadRange: ProfileRange): Unit = reporter.reportBackground(this, threadRange) @@ -105,7 +98,7 @@ private [profile] class RealProfiler(reporter : ProfileReporter)(using Context) private val mainThread = Thread.currentThread() @nowarn("cat=deprecation") - private[profile] def snapThread(idleTimeNanos: Long): ProfileSnap = { + private[profile] def snapThread(idleTimeNanos: Long): ProfileSnap = import RealProfiler._ val current = Thread.currentThread() @@ -119,33 +112,28 @@ private [profile] class RealProfiler(reporter : ProfileReporter)(using Context) allocatedBytes = threadMx.getThreadAllocatedBytes(Thread.currentThread().getId), heapBytes = readHeapUsage() ) - } private def readHeapUsage() = RealProfiler.memoryMx.getHeapMemoryUsage.getUsed @nowarn - private def doGC: Unit = { + private def doGC: Unit = System.gc() System.runFinalization() - } - RealProfiler.gcMx foreach { + RealProfiler.gcMx foreach: case emitter: NotificationEmitter => emitter.addNotificationListener(this, null, null) case gc => println(s"Cant connect gcListener to ${gc.getClass}") - } reporter.header(this) - override def finished(): Unit = { + override def finished(): Unit = //we may miss a GC event if gc is occurring as we call this - RealProfiler.gcMx foreach { + RealProfiler.gcMx foreach: case emitter: NotificationEmitter => emitter.removeNotificationListener(this) case gc => - } reporter.close(this) - } - override def 
handleNotification(notification: Notification, handback: scala.Any): Unit = { + override def handleNotification(notification: Notification, handback: scala.Any): Unit = import java.lang.{Long => jLong} import java.lang.{Integer => jInt} val reportNs = System.nanoTime() @@ -154,7 +142,7 @@ private [profile] class RealProfiler(reporter : ProfileReporter)(using Context) val message = notification.getMessage val tpe = notification.getType val time= notification.getTimeStamp - data match { + data match case cd: CompositeData if tpe == "com.sun.management.gc.notification" => val name = cd.get("gcName").toString val action = cd.get("gcAction").toString @@ -165,49 +153,40 @@ private [profile] class RealProfiler(reporter : ProfileReporter)(using Context) val endTime = info.get("endTime").asInstanceOf[jLong].longValue() val threads = info.get("GcThreadCount").asInstanceOf[jInt].longValue() reporter.reportGc(GcEventData("", reportNs, startTime, endTime, name, action, cause, threads)) - } - } - override def afterPhase(phase: Phase, snapBefore: ProfileSnap): Unit = { + override def afterPhase(phase: Phase, snapBefore: ProfileSnap): Unit = assert(mainThread eq Thread.currentThread()) val initialSnap = snapThread(0) - if (ctx.settings.YprofileExternalTool.value.contains(phase.toString)) { + if (ctx.settings.YprofileExternalTool.value.contains(phase.toString)) println("Profile hook stop") ExternalToolHook.after() - } - val finalSnap = if (ctx.settings.YprofileRunGcBetweenPhases.value.contains(phase.toString)) { + val finalSnap = if (ctx.settings.YprofileRunGcBetweenPhases.value.contains(phase.toString)) doGC initialSnap.updateHeap(readHeapUsage()) - } else initialSnap reporter.reportForeground(this, ProfileRange(snapBefore, finalSnap, phase, "", 0, Thread.currentThread)) - } - override def beforePhase(phase: Phase): ProfileSnap = { + override def beforePhase(phase: Phase): ProfileSnap = assert(mainThread eq Thread.currentThread()) if 
(ctx.settings.YprofileRunGcBetweenPhases.value.contains(phase.toString)) doGC - if (ctx.settings.YprofileExternalTool.value.contains(phase.toString)) { + if (ctx.settings.YprofileExternalTool.value.contains(phase.toString)) println("Profile hook start") ExternalToolHook.before() - } snapThread(0) - } -} case class EventType(name: String) -object EventType { +object EventType: //main thread with other tasks val MAIN: EventType = EventType("main") //other task ( background thread) val BACKGROUND: EventType = EventType("background") //total for compile val GC: EventType = EventType("GC") -} -sealed trait ProfileReporter { +sealed trait ProfileReporter: def reportBackground(profiler: RealProfiler, threadRange: ProfileRange): Unit def reportForeground(profiler: RealProfiler, threadRange: ProfileRange): Unit @@ -215,9 +194,8 @@ sealed trait ProfileReporter { def header(profiler: RealProfiler) :Unit def close(profiler: RealProfiler) :Unit -} -object ConsoleProfileReporter extends ProfileReporter { +object ConsoleProfileReporter extends ProfileReporter: @sharable var totalAlloc = 0L override def reportBackground(profiler: RealProfiler, threadRange: ProfileRange): Unit = @@ -236,14 +214,12 @@ object ConsoleProfileReporter extends ProfileReporter { override def reportGc(data: GcEventData): Unit = println(s"Profiler GC reported ${data.gcEndMillis - data.gcStartMillis}ms") -} -class StreamProfileReporter(out:PrintWriter) extends ProfileReporter { - override def header(profiler: RealProfiler): Unit = { +class StreamProfileReporter(out:PrintWriter) extends ProfileReporter: + override def header(profiler: RealProfiler): Unit = out.println(s"info, ${profiler.id}, version, 2, output, ${profiler.outDir}") out.println(s"header(main/background),startNs,endNs,runId,phaseId,phaseName,purpose,task-count,threadId,threadName,runNs,idleNs,cpuTimeNs,userTimeNs,allocatedByte,heapSize") out.println(s"header(GC),startNs,endNs,startMs,endMs,name,action,cause,threads") - } override def 
reportBackground(profiler: RealProfiler, threadRange: ProfileRange): Unit = reportCommon(EventType.BACKGROUND, profiler, threadRange) @@ -253,15 +229,12 @@ class StreamProfileReporter(out:PrintWriter) extends ProfileReporter { private def reportCommon(tpe:EventType, profiler: RealProfiler, threadRange: ProfileRange): Unit = out.println(s"$tpe,${threadRange.start.snapTimeNanos},${threadRange.end.snapTimeNanos},${profiler.id},${threadRange.phase.id},${threadRange.phase.phaseName.replace(',', ' ')},${threadRange.purpose},${threadRange.taskCount},${threadRange.thread.getId},${threadRange.thread.getName},${threadRange.runNs},${threadRange.idleNs},${threadRange.cpuNs},${threadRange.userNs},${threadRange.allocatedBytes},${threadRange.end.heapBytes} ") - override def reportGc(data: GcEventData): Unit = { + override def reportGc(data: GcEventData): Unit = val duration = TimeUnit.MILLISECONDS.toNanos(data.gcEndMillis - data.gcStartMillis + 1) val start = data.reportTimeNs - duration out.println(s"${EventType.GC},$start,${data.reportTimeNs},${data.gcStartMillis}, ${data.gcEndMillis},${data.name},${data.action},${data.cause},${data.threads}") - } - override def close(profiler: RealProfiler): Unit = { + override def close(profiler: RealProfiler): Unit = out.flush out.close - } -} diff --git a/compiler/src/dotty/tools/dotc/quoted/Interpreter.scala b/compiler/src/dotty/tools/dotc/quoted/Interpreter.scala index c9a77dbfa151..6a0fbc587d77 100644 --- a/compiler/src/dotty/tools/dotc/quoted/Interpreter.scala +++ b/compiler/src/dotty/tools/dotc/quoted/Interpreter.scala @@ -50,18 +50,17 @@ class Interpreter(pos: SrcPos, classLoader0: ClassLoader)(using Context): * Throws a StopInterpretation if the tree could not be interpreted or a runtime exception ocurred. 
*/ final def interpret[T](tree: Tree)(using ct: ClassTag[T]): Option[T] = - interpretTree(tree)(using emptyEnv) match { + interpretTree(tree)(using emptyEnv) match case obj: T => Some(obj) case obj => // TODO upgrade to a full type tag check or something similar report.error(em"Interpreted tree returned a result of an unexpected type. Expected ${ct.runtimeClass} but was ${obj.getClass}", pos) None - } /** Returns the result of interpreting the code in the tree. * Throws a StopInterpretation if the tree could not be interpreted or a runtime exception ocurred. */ - protected def interpretTree(tree: Tree)(using Env): Object = tree match { + protected def interpretTree(tree: Tree)(using Env): Object = tree match case Literal(Constant(value)) => interpretLiteral(value) @@ -76,18 +75,16 @@ class Interpreter(pos: SrcPos, classLoader0: ClassLoader)(using Context): interpretNew(fn.symbol, args.flatten.map(interpretTree)) else if (fn.symbol.is(Module)) interpretModuleAccess(fn.symbol) - else if (fn.symbol.is(Method) && fn.symbol.isStatic) { + else if (fn.symbol.is(Method) && fn.symbol.isStatic) interpretedStaticMethodCall(fn.symbol.owner, fn.symbol, interpretArgs(args, fn.symbol.info)) - } else if fn.symbol.isStatic then assert(args.isEmpty) interpretedStaticFieldAccess(fn.symbol) else if (fn.qualifier.symbol.is(Module) && fn.qualifier.symbol.isStatic) if (fn.name == nme.asInstanceOfPM) interpretModuleAccess(fn.qualifier.symbol) - else { + else interpretedStaticMethodCall(fn.qualifier.symbol.moduleClass, fn.symbol, interpretArgs(args, fn.symbol.info)) - } else if (env.contains(fn.symbol)) env(fn.symbol) else if (tree.symbol.is(InlineProxy)) @@ -113,9 +110,8 @@ class Interpreter(pos: SrcPos, classLoader0: ClassLoader)(using Context): case _ => unexpectedTree(tree) - } - private def interpretArgs(argss: List[List[Tree]], fnType: Type)(using Env): List[Object] = { + private def interpretArgs(argss: List[List[Tree]], fnType: Type)(using Env): List[Object] = def 
interpretArgsGroup(args: List[Tree], argTypes: List[Type]): List[Object] = assert(args.size == argTypes.size) val view = @@ -139,9 +135,8 @@ class Interpreter(pos: SrcPos, classLoader0: ClassLoader)(using Context): case _ => assert(argss.isEmpty) Nil - } - private def interpretBlock(stats: List[Tree], expr: Tree)(using Env) = { + private def interpretBlock(stats: List[Tree], expr: Tree)(using Env) = var unexpected: Option[Object] = None val newEnv = stats.foldLeft(env)((accEnv, stat) => stat match case stat: ValDef => @@ -152,7 +147,6 @@ class Interpreter(pos: SrcPos, classLoader0: ClassLoader)(using Context): accEnv ) unexpected.getOrElse(interpretTree(expr)(using newEnv)) - } private def interpretLiteral(value: Any): Object = value.asInstanceOf[Object] @@ -160,7 +154,7 @@ class Interpreter(pos: SrcPos, classLoader0: ClassLoader)(using Context): private def interpretVarargs(args: List[Object]): Object = args.toSeq - private def interpretedStaticMethodCall(moduleClass: Symbol, fn: Symbol, args: List[Object]): Object = { + private def interpretedStaticMethodCall(moduleClass: Symbol, fn: Symbol, args: List[Object]): Object = val inst = try loadModule(moduleClass) catch @@ -170,43 +164,37 @@ class Interpreter(pos: SrcPos, classLoader0: ClassLoader)(using Context): val name = fn.name.asTermName val method = getMethod(clazz, name, paramsSig(fn)) stopIfRuntimeException(method.invoke(inst, args: _*), method) - } - private def interpretedStaticFieldAccess(sym: Symbol): Object = { + private def interpretedStaticFieldAccess(sym: Symbol): Object = val clazz = loadClass(sym.owner.fullName.toString) val field = clazz.getField(sym.name.toString) field.get(null) - } private def interpretModuleAccess(fn: Symbol): Object = loadModule(fn.moduleClass) - private def interpretNew(fn: Symbol, args: List[Object]): Object = { + private def interpretNew(fn: Symbol, args: List[Object]): Object = val className = fn.owner.fullName.mangledString.replaceAll("\\$\\.", "\\$") val clazz = 
loadClass(className) val constr = clazz.getConstructor(paramsSig(fn): _*) constr.newInstance(args: _*).asInstanceOf[Object] - } private def unexpectedTree(tree: Tree): Object = throw new StopInterpretation(em"Unexpected tree could not be interpreted: ${tree.toString}", tree.srcPos) private def loadModule(sym: Symbol): Object = - if (sym.owner.is(Package)) { + if (sym.owner.is(Package)) // is top level object val moduleClass = loadClass(sym.fullName.toString) moduleClass.getField(str.MODULE_INSTANCE_FIELD).get(null) - } - else { + else // nested object in an object val clazz = loadClass(sym.binaryClassName) clazz.getConstructor().newInstance().asInstanceOf[Object] - } - private def loadReplLineClass(moduleClass: Symbol): Class[?] = { + private def loadReplLineClass(moduleClass: Symbol): Class[?] = val lineClassloader = new AbstractFileClassLoader(ctx.settings.outputDir.value, classLoader) lineClassloader.loadClass(moduleClass.name.firstPart.toString) - } private def loadClass(name: String): Class[?] 
= try classLoader.loadClass(name) @@ -217,17 +205,16 @@ class Interpreter(pos: SrcPos, classLoader0: ClassLoader)(using Context): private def getMethod(clazz: Class[?], name: Name, paramClasses: List[Class[?]]): JLRMethod = try clazz.getMethod(name.toString, paramClasses: _*) - catch { + catch case _: NoSuchMethodException => val msg = em"Could not find method ${clazz.getCanonicalName}.$name with parameters ($paramClasses%, %)" throw new StopInterpretation(msg, pos) case MissingClassDefinedInCurrentRun(sym) => suspendOnMissing(sym, pos) - } private def stopIfRuntimeException[T](thunk: => T, method: JLRMethod): T = try thunk - catch { + catch case ex: RuntimeException => val sw = new StringWriter() sw.write("A runtime exception occurred while executing macro expansion\n") @@ -237,7 +224,7 @@ class Interpreter(pos: SrcPos, classLoader0: ClassLoader)(using Context): sw.write("\n") throw new StopInterpretation(sw.toString.toMessage, pos) case ex: InvocationTargetException => - ex.getTargetException match { + ex.getTargetException match case ex: scala.quoted.runtime.StopMacroExpansion => throw ex case MissingClassDefinedInCurrentRun(sym) => @@ -245,7 +232,7 @@ class Interpreter(pos: SrcPos, classLoader0: ClassLoader)(using Context): case targetException => val sw = new StringWriter() sw.write("Exception occurred while executing macro expansion.\n") - if (!ctx.settings.Ydebug.value) { + if (!ctx.settings.Ydebug.value) val end = targetException.getStackTrace.lastIndexWhere { x => x.getClassName == method.getDeclaringClass.getCanonicalName && x.getMethodName == method.getName } @@ -256,22 +243,18 @@ class Interpreter(pos: SrcPos, classLoader0: ClassLoader)(using Context): targetException match case _: CyclicReference => sw.write("\nSee full stack trace using -Ydebug") case _ => - } else { + else targetException.printStackTrace(new PrintWriter(sw)) - } sw.write("\n") throw new StopInterpretation(sw.toString.toMessage, pos) - } - } /** List of classes of the parameters of 
the signature of `sym` */ - private def paramsSig(sym: Symbol): List[Class[?]] = { - def paramClass(param: Type): Class[?] = { - def arrayDepth(tpe: Type, depth: Int): (Type, Int) = tpe match { + private def paramsSig(sym: Symbol): List[Class[?]] = + def paramClass(param: Type): Class[?] = + def arrayDepth(tpe: Type, depth: Int): (Type, Int) = tpe match case JavaArrayType(elemType) => arrayDepth(elemType, depth + 1) case _ => (tpe, depth) - } - def javaArraySig(tpe: Type): String = { + def javaArraySig(tpe: Type): String = val (elemType, depth) = arrayDepth(tpe, 0) val sym = elemType.classSymbol val suffix = @@ -285,15 +268,13 @@ class Interpreter(pos: SrcPos, classLoader0: ClassLoader)(using Context): else if (sym == defn.CharClass) "C" else "L" + javaSig(elemType) + ";" ("[" * depth) + suffix - } - def javaSig(tpe: Type): String = tpe match { + def javaSig(tpe: Type): String = tpe match case tpe: JavaArrayType => javaArraySig(tpe) case _ => // Take the flatten name of the class and the full package name val pack = tpe.classSymbol.topLevelClass.owner val packageName = if (pack == defn.EmptyPackageClass) "" else s"${pack.fullName}." 
packageName + tpe.classSymbol.fullNameSeparated(FlatName).toString - } val sym = param.classSymbol if (sym == defn.BooleanClass) classOf[Boolean] @@ -305,20 +286,16 @@ class Interpreter(pos: SrcPos, classLoader0: ClassLoader)(using Context): else if (sym == defn.FloatClass) classOf[Float] else if (sym == defn.DoubleClass) classOf[Double] else java.lang.Class.forName(javaSig(param), false, classLoader) - } - def getExtraParams(tp: Type): List[Type] = tp.widenDealias match { + def getExtraParams(tp: Type): List[Type] = tp.widenDealias match case tp: AppliedType if defn.isContextFunctionType(tp) => // Call context function type direct method tp.args.init.map(arg => TypeErasure.erasure(arg)) ::: getExtraParams(tp.args.last) case _ => Nil - } val extraParams = getExtraParams(sym.info.finalResultType) - val allParams = TypeErasure.erasure(sym.info) match { + val allParams = TypeErasure.erasure(sym.info) match case meth: MethodType => meth.paramInfos ::: extraParams case _ => extraParams - } allParams.map(paramClass) - } end Interpreter object Interpreter: @@ -333,8 +310,8 @@ object Interpreter: def unapply(arg: Tree)(using Context): Option[(RefTree, List[List[Tree]])] = Call0.unapply(arg).map((fn, args) => (fn, args.reverse)) - private object Call0 { - def unapply(arg: Tree)(using Context): Option[(RefTree, List[List[Tree]])] = arg match { + private object Call0: + def unapply(arg: Tree)(using Context): Option[(RefTree, List[List[Tree]])] = arg match case Select(Call0(fn, args), nme.apply) if defn.isContextFunctionType(fn.tpe.widenDealias.finalResultType) => Some((fn, args)) case fn: Ident => Some((tpd.desugarIdent(fn).withSpan(fn.span), Nil)) @@ -344,12 +321,10 @@ object Interpreter: else Some((fn, args2 :: args1)) case TypeApply(Call0(fn, args), _) => Some((fn, args)) case _ => None - } - } end Call - object MissingClassDefinedInCurrentRun { - def unapply(targetException: Throwable)(using Context): Option[Symbol] = { + object MissingClassDefinedInCurrentRun: + def 
unapply(targetException: Throwable)(using Context): Option[Symbol] = if !ctx.compilationUnit.isSuspendable then None else targetException match case _: NoClassDefFoundError | _: ClassNotFoundException => @@ -359,8 +334,6 @@ object Interpreter: val sym = staticRef(className.toTypeName).symbol if (sym.isDefinedInCurrentRun) Some(sym) else None case _ => None - } - } def suspendOnMissing(sym: Symbol, pos: SrcPos)(using Context): Nothing = if ctx.settings.XprintSuspension.value then diff --git a/compiler/src/dotty/tools/dotc/quoted/MacroExpansion.scala b/compiler/src/dotty/tools/dotc/quoted/MacroExpansion.scala index 141b349826b4..02e1294efd18 100644 --- a/compiler/src/dotty/tools/dotc/quoted/MacroExpansion.scala +++ b/compiler/src/dotty/tools/dotc/quoted/MacroExpansion.scala @@ -5,7 +5,7 @@ import dotty.tools.dotc.core.Contexts._ import dotty.tools.dotc.typer.Typer import dotty.tools.dotc.util.{Property, SourcePosition} -object MacroExpansion { +object MacroExpansion: private val MacroExpansionPosition = new Property.Key[SourcePosition] @@ -14,5 +14,4 @@ object MacroExpansion { def context(inlinedFrom: tpd.Tree)(using Context): Context = QuotesCache.init(ctx.fresh).setProperty(MacroExpansionPosition, SourcePosition(inlinedFrom.source, inlinedFrom.span)).setTypeAssigner(new Typer(ctx.nestingLevel + 1)).withSource(inlinedFrom.source) -} diff --git a/compiler/src/dotty/tools/dotc/quoted/PickledQuotes.scala b/compiler/src/dotty/tools/dotc/quoted/PickledQuotes.scala index 7596549fe401..facfb835d12f 100644 --- a/compiler/src/dotty/tools/dotc/quoted/PickledQuotes.scala +++ b/compiler/src/dotty/tools/dotc/quoted/PickledQuotes.scala @@ -22,31 +22,28 @@ import scala.collection.mutable import QuoteUtils._ -object PickledQuotes { +object PickledQuotes: import tpd._ /** Pickle the tree of the quote into strings */ def pickleQuote(tree: Tree)(using Context): List[String] = if (ctx.reporter.hasErrors) Nil - else { + else assert(!tree.isInstanceOf[Hole]) // Should not be pickled as 
it represents `'{$x}` which should be optimized to `x` val pickled = pickle(tree) TastyString.pickle(pickled) - } /** Transform the expression into its fully spliced Tree */ - def quotedExprToTree[T](expr: quoted.Expr[T])(using Context): Tree = { + def quotedExprToTree[T](expr: quoted.Expr[T])(using Context): Tree = val expr1 = expr.asInstanceOf[ExprImpl] ScopeException.checkInCorrectScope(expr1.scope, SpliceScope.getCurrent, expr1.tree, "Expr") changeOwnerOfTree(expr1.tree, ctx.owner) - } /** Transform the expression into its fully spliced TypeTree */ - def quotedTypeToTree(tpe: quoted.Type[?])(using Context): Tree = { + def quotedTypeToTree(tpe: quoted.Type[?])(using Context): Tree = val tpe1 = tpe.asInstanceOf[TypeImpl] ScopeException.checkInCorrectScope(tpe1.scope, SpliceScope.getCurrent, tpe1.typeTree, "Type") changeOwnerOfTree(tpe1.typeTree, ctx.owner) - } /** `typeHole`/`types` argument of `QuoteUnpickler.{unpickleExpr,unpickleExprV2,unpickleType,unpickleTypeV2}` */ enum TypeHole: @@ -80,28 +77,26 @@ object PickledQuotes { type ArgV2 = scala.quoted.Type[?] 
| scala.quoted.Expr[Any] /** Unpickle the tree contained in the TastyExpr */ - def unpickleTerm(pickled: String | List[String], typeHole: TypeHole, termHole: ExprHole)(using Context): Tree = { + def unpickleTerm(pickled: String | List[String], typeHole: TypeHole, termHole: ExprHole)(using Context): Tree = val unpickled = withMode(Mode.ReadPositions)(unpickle(pickled, isType = false)) val Inlined(call, Nil, expansion) = unpickled: @unchecked val inlineCtx = inlineContext(call) val expansion1 = spliceTypes(expansion, typeHole)(using inlineCtx) val expansion2 = spliceTerms(expansion1, typeHole, termHole)(using inlineCtx) cpy.Inlined(unpickled)(call, Nil, expansion2) - } /** Unpickle the tree contained in the TastyType */ - def unpickleTypeTree(pickled: String | List[String], typeHole: TypeHole)(using Context): Tree = { + def unpickleTypeTree(pickled: String | List[String], typeHole: TypeHole)(using Context): Tree = val unpickled = withMode(Mode.ReadPositions)(unpickle(pickled, isType = true)) spliceTypes(unpickled, typeHole) - } /** Replace all term holes with the spliced terms */ - private def spliceTerms(tree: Tree, typeHole: TypeHole, termHole: ExprHole)(using Context): Tree = { - def evaluateHoles = new TreeMap { - override def transform(tree: tpd.Tree)(using Context): tpd.Tree = tree match { + private def spliceTerms(tree: Tree, typeHole: TypeHole, termHole: ExprHole)(using Context): Tree = + def evaluateHoles = new TreeMap: + override def transform(tree: tpd.Tree)(using Context): tpd.Tree = tree match case Hole(isTerm, idx, args, _) => - inContext(SpliceScope.contextWithNewSpliceScope(tree.sourcePos)) { + inContext(SpliceScope.contextWithNewSpliceScope(tree.sourcePos)): if isTerm then val quotedExpr = termHole match case ExprHole.V1(evalHole) => @@ -124,38 +119,31 @@ object PickledQuotes { val TypeHole.V1(evalHole) = typeHole: @unchecked val quotedType = evalHole.nn.apply(idx, reifyTypeHoleArgs(args)) PickledQuotes.quotedTypeToTree(quotedType) - } case tree => 
if tree.isDef then - tree.symbol.annotations = tree.symbol.annotations.map { + tree.symbol.annotations = tree.symbol.annotations.map: annot => annot.derivedAnnotation(transform(annot.tree)) - } end if - val tree1 = super.transform(tree) - tree1.withType(mapAnnots(tree1.tpe)) - } + val tree1 = super.transform(tree) + tree1.withType(mapAnnots(tree1.tpe)) // Evaluate holes in type annotations - private val mapAnnots = new TypeMap { - override def apply(tp: Type): Type = { + private val mapAnnots = new TypeMap: + override def apply(tp: Type): Type = tp match case tp @ AnnotatedType(underlying, annot) => val underlying1 = this(underlying) derivedAnnotatedType(tp, underlying1, annot.derivedAnnotation(transform(annot.tree))) case _ => mapOver(tp) - } - } - } val tree1 = termHole match case ExprHole.V2(null) => tree case _ => evaluateHoles.transform(tree) quotePickling.println(i"**** evaluated quote\n$tree1") tree1 - } /** Replace all type holes generated with the spliced types */ - private def spliceTypes(tree: Tree, typeHole: TypeHole)(using Context): Tree = { + private def spliceTypes(tree: Tree, typeHole: TypeHole)(using Context): Tree = if typeHole.isEmpty then tree else tree match case Block(stat :: rest, expr1) if stat.symbol.hasAnnotation(defn.QuotedRuntime_SplicedTypeAnnot) => @@ -177,8 +165,8 @@ object PickledQuotes { PickledQuotes.quotedTypeToTree(types.nn.apply(idx)) (tdef.symbol, tree.tpe) }.toMap - class ReplaceSplicedTyped extends TypeMap() { - override def apply(tp: Type): Type = tp match { + class ReplaceSplicedTyped extends TypeMap(): + override def apply(tp: Type): Type = tp match case tp: ClassInfo => tp.derivedClassInfo(declaredParents = tp.declaredParents.map(apply)) case tp: TypeRef => @@ -187,14 +175,11 @@ object PickledQuotes { case _ => mapOver(tp) case _ => mapOver(tp) - } - } val expansion2 = new TreeTypeMap(new ReplaceSplicedTyped).transform(expr1) quotePickling.println(i"**** typed quote\n${expansion2.show}") expansion2 case _ => tree - } def 
reifyTypeHoleArgs(args: List[Tree])(using Context): List[scala.quoted.Type[?]] = args.map(arg => new TypeImpl(arg, SpliceScope.getCurrent)) @@ -214,7 +199,7 @@ object PickledQuotes { // TASTY picklingtests/pos/quoteTest.scala /** Pickle tree into it's TASTY bytes s*/ - private def pickle(tree: Tree)(using Context): Array[Byte] = { + private def pickle(tree: Tree)(using Context): Array[Byte] = quotePickling.println(i"**** pickling quote of\n$tree") val pickler = new TastyPickler(defn.RootClass) val treePkl = new TreePickler(pickler) @@ -230,10 +215,9 @@ object PickledQuotes { val pickled = pickler.assembleParts() quotePickling.println(s"**** pickled quote\n${TastyPrinter.showContents(pickled, ctx.settings.color.value == "never")}") pickled - } /** Unpickle TASTY bytes into it's tree */ - private def unpickle(pickled: String | List[String], isType: Boolean)(using Context): Tree = { + private def unpickle(pickled: String | List[String], isType: Boolean)(using Context): Tree = QuotesCache.getTree(pickled) match case Some(tree) => quotePickling.println(s"**** Using cached quote for TASTY\n$tree") @@ -261,9 +245,9 @@ object PickledQuotes { // Quotes context that that has a class `spliceOwner` can come from a macro annotation // or a user setting it explicitly using `Symbol.asQuotes`. 
ctx.withOwner(newSymbol(ctx.owner, "$quoteOwnedByClass$".toTermName, Private, defn.AnyType, NoSymbol)) - else ctx + else ctx - inContext(unpicklingContext) { + inContext(unpicklingContext): quotePickling.println(s"**** unpickling quote from TASTY\n${TastyPrinter.showContents(bytes, ctx.settings.color.value == "never")}") @@ -280,8 +264,5 @@ object PickledQuotes { quotePickling.println(i"**** unpickled quote\n$tree") tree - } - } -} diff --git a/compiler/src/dotty/tools/dotc/quoted/QuoteUtils.scala b/compiler/src/dotty/tools/dotc/quoted/QuoteUtils.scala index 604c8da3420a..a6a6624bc385 100644 --- a/compiler/src/dotty/tools/dotc/quoted/QuoteUtils.scala +++ b/compiler/src/dotty/tools/dotc/quoted/QuoteUtils.scala @@ -8,23 +8,19 @@ object QuoteUtils: import tpd._ /** Get the owner of a tree if it has one */ - def treeOwner(tree: Tree)(using Context): Option[Symbol] = { - val getCurrentOwner = new TreeAccumulator[Option[Symbol]] { + def treeOwner(tree: Tree)(using Context): Option[Symbol] = + val getCurrentOwner = new TreeAccumulator[Option[Symbol]]: def apply(x: Option[Symbol], tree: tpd.Tree)(using Context): Option[Symbol] = if (x.isDefined) x - else tree match { + else tree match case tree: DefTree => Some(tree.symbol.owner) case _ => foldOver(x, tree) - } - } getCurrentOwner(None, tree) - } /** Changes the owner of the tree based on the current owner of the tree */ - def changeOwnerOfTree(tree: Tree, owner: Symbol)(using Context): Tree = { + def changeOwnerOfTree(tree: Tree, owner: Symbol)(using Context): Tree = treeOwner(tree) match case Some(oldOwner) if oldOwner != owner => tree.changeOwner(oldOwner, owner) case _ => tree - } end QuoteUtils diff --git a/compiler/src/dotty/tools/dotc/quoted/QuotesCache.scala b/compiler/src/dotty/tools/dotc/quoted/QuotesCache.scala index c063e437cb19..696add4b49b8 100644 --- a/compiler/src/dotty/tools/dotc/quoted/QuotesCache.scala +++ b/compiler/src/dotty/tools/dotc/quoted/QuotesCache.scala @@ -5,7 +5,7 @@ import 
dotty.tools.dotc.util.Property import dotty.tools.dotc.ast.tpd -object QuotesCache { +object QuotesCache: import tpd._ /** A key to be used in a context property that caches the unpickled trees */ @@ -23,4 +23,3 @@ object QuotesCache { /** Context with a cache for quote trees and tasty bytes */ def init(ctx: FreshContext): ctx.type = ctx.setProperty(QuotesCacheKey, collection.mutable.Map.empty) -} diff --git a/compiler/src/dotty/tools/dotc/quoted/TastyString.scala b/compiler/src/dotty/tools/dotc/quoted/TastyString.scala index 369e646e883e..28d10718c732 100644 --- a/compiler/src/dotty/tools/dotc/quoted/TastyString.scala +++ b/compiler/src/dotty/tools/dotc/quoted/TastyString.scala @@ -6,26 +6,23 @@ import java.util.Base64 import java.nio.charset.StandardCharsets.UTF_8 /** Utils for String representation of TASTY */ -object TastyString { +object TastyString: // Max size of a string literal in the bytecode private inline val maxStringSize = 65535 /** Encode TASTY bytes into a List of String */ - def pickle(bytes: Array[Byte]): List[String] = { + def pickle(bytes: Array[Byte]): List[String] = val str = new String(Base64.getEncoder().encode(bytes), UTF_8) str.toSeq.sliding(maxStringSize, maxStringSize).map(_.unwrap).toList - } /** Decode the List of Strings into TASTY bytes */ - def unpickle(strings: List[String]): Array[Byte] = { + def unpickle(strings: List[String]): Array[Byte] = val string = new StringBuilder strings.foreach(string.append) Base64.getDecoder().decode(string.result().getBytes(UTF_8)) - } /** Decode the Strings into TASTY bytes */ def unpickle(string: String): Array[Byte] = Base64.getDecoder().decode(string.getBytes(UTF_8)) -} diff --git a/compiler/src/dotty/tools/dotc/quoted/reflect/FromSymbol.scala b/compiler/src/dotty/tools/dotc/quoted/reflect/FromSymbol.scala index 87d0cbb7be08..670ed34f34da 100644 --- a/compiler/src/dotty/tools/dotc/quoted/reflect/FromSymbol.scala +++ b/compiler/src/dotty/tools/dotc/quoted/reflect/FromSymbol.scala @@ -9,9 +9,9 @@ 
import dotty.tools.dotc.core.StdNames._ import dotty.tools.dotc.core.Symbols._ import dotty.tools.dotc.core.Types._ -object FromSymbol { +object FromSymbol: - def definitionFromSym(sym: Symbol)(using Context): tpd.Tree = { + def definitionFromSym(sym: Symbol)(using Context): tpd.Tree = assert(sym.exists, "Cannot get tree of no symbol") assert(!sym.is(Package), "Cannot get tree of package symbol") if (sym.isClass) classDef(sym.asClass) @@ -20,9 +20,8 @@ object FromSymbol { else if (sym.is(Method)) defDefFromSym(sym.asTerm) else if (sym.is(Case, butNot = ModuleVal | EnumVal)) bindFromSym(sym.asTerm) else valDefFromSym(sym.asTerm) - } - def classDef(cls: ClassSymbol)(using Context): tpd.TypeDef = cls.defTree match { + def classDef(cls: ClassSymbol)(using Context): tpd.TypeDef = cls.defTree match case tree: tpd.TypeDef => tree case tpd.EmptyTree => val constrSym = cls.unforcedDecls.find(_.isPrimaryConstructor).orElse( @@ -33,30 +32,23 @@ object FromSymbol { val parents = cls.info.parents.map(tpd.TypeTree(_)) val body = cls.unforcedDecls.filter(!_.isPrimaryConstructor).map(s => definitionFromSym(s)) tpd.ClassDefWithParents(cls, constr, parents, body) - } - def typeDefFromSym(sym: TypeSymbol)(using Context): tpd.TypeDef = sym.defTree match { + def typeDefFromSym(sym: TypeSymbol)(using Context): tpd.TypeDef = sym.defTree match case tree: tpd.TypeDef => tree case tpd.EmptyTree => tpd.TypeDef(sym) - } - def defDefFromSym(sym: TermSymbol)(using Context): tpd.DefDef = sym.defTree match { + def defDefFromSym(sym: TermSymbol)(using Context): tpd.DefDef = sym.defTree match case tree: tpd.DefDef => tree case tpd.EmptyTree => tpd.DefDef(sym) - } - def valDefFromSym(sym: TermSymbol)(using Context): tpd.ValDef = sym.defTree match { + def valDefFromSym(sym: TermSymbol)(using Context): tpd.ValDef = sym.defTree match case tree: tpd.ValDef => tree case tpd.EmptyTree => tpd.ValDef(sym) - } - def bindFromSym(sym: TermSymbol)(using Context): tpd.Bind = sym.defTree match { + def 
bindFromSym(sym: TermSymbol)(using Context): tpd.Bind = sym.defTree match case tree: tpd.Bind => tree case tpd.EmptyTree => tpd.Bind(sym, untpd.Ident(nme.WILDCARD).withType(sym.typeRef)) - } - def typeBindFromSym(sym: TypeSymbol)(using Context): tpd.Bind = sym.defTree match { + def typeBindFromSym(sym: TypeSymbol)(using Context): tpd.Bind = sym.defTree match case tree: tpd.Bind => tree case tpd.EmptyTree => tpd.Bind(sym, untpd.Ident(nme.WILDCARD).withType(sym.typeRef)) - } -} diff --git a/compiler/src/dotty/tools/dotc/report.scala b/compiler/src/dotty/tools/dotc/report.scala index 38f2ab347c4c..408c40115da0 100644 --- a/compiler/src/dotty/tools/dotc/report.scala +++ b/compiler/src/dotty/tools/dotc/report.scala @@ -106,18 +106,16 @@ object report: def debuglog(msg: => String)(using Context): Unit = if (ctx.debug) log(msg) - def informTime(msg: => String, start: Long)(using Context): Unit = { + def informTime(msg: => String, start: Long)(using Context): Unit = def elapsed = s" in ${currentTimeMillis - start}ms" informProgress(msg + elapsed) - } def informProgress(msg: => String)(using Context): Unit = inform("[" + msg + "]") - def logWith[T](msg: => String)(value: T)(using Context): T = { + def logWith[T](msg: => String)(value: T)(using Context): T = log(msg + " " + value) value - } def debugwarn(msg: => String, pos: SrcPos = NoSourcePosition)(using Context): Unit = if (ctx.settings.Ydebug.value) warning(msg, pos) @@ -131,7 +129,7 @@ object report: private object messageRendering extends MessageRendering // Should only be called from Run#enrichErrorMessage. 
- def enrichErrorMessage(errorMessage: String)(using Context): String = try { + def enrichErrorMessage(errorMessage: String)(using Context): String = try def formatExplain(pairs: List[(String, Any)]) = pairs.map((k, v) => f"$k%20s: $v").mkString("\n") val settings = ctx.settings.userSetSettings(ctx.settingsState).sortBy(_.name) @@ -187,5 +185,5 @@ object report: |$info2 | |$context_s""".stripMargin - } catch case _: Throwable => errorMessage // don't introduce new errors trying to report errors, so swallow exceptions + catch case _: Throwable => errorMessage // don't introduce new errors trying to report errors, so swallow exceptions end report diff --git a/compiler/src/dotty/tools/dotc/reporting/ConsoleReporter.scala b/compiler/src/dotty/tools/dotc/reporting/ConsoleReporter.scala index a95af962c053..4285aab15498 100644 --- a/compiler/src/dotty/tools/dotc/reporting/ConsoleReporter.scala +++ b/compiler/src/dotty/tools/dotc/reporting/ConsoleReporter.scala @@ -12,26 +12,21 @@ import Diagnostic.Error class ConsoleReporter( reader: BufferedReader = Console.in, writer: PrintWriter = new PrintWriter(Console.err, true) -) extends ConsoleReporter.AbstractConsoleReporter { +) extends ConsoleReporter.AbstractConsoleReporter: override def printMessage(msg: String): Unit = { writer.print(msg + "\n"); writer.flush() } override def flush()(using Context): Unit = writer.flush() - override def doReport(dia: Diagnostic)(using Context): Unit = { + override def doReport(dia: Diagnostic)(using Context): Unit = super.doReport(dia) dia match case dia: Error if ctx.settings.Xprompt.value => Reporter.displayPrompt(reader, writer) case _ => - } -} -object ConsoleReporter { - abstract class AbstractConsoleReporter extends AbstractReporter { +object ConsoleReporter: + abstract class AbstractConsoleReporter extends AbstractReporter: /** Prints the message. */ def printMessage(msg: String): Unit /** Prints the message with the given position indication. 
*/ - def doReport(dia: Diagnostic)(using Context): Unit = { + def doReport(dia: Diagnostic)(using Context): Unit = printMessage(messageAndPos(dia)) - } - } -} diff --git a/compiler/src/dotty/tools/dotc/reporting/Diagnostic.scala b/compiler/src/dotty/tools/dotc/reporting/Diagnostic.scala index 624aa93924e8..510e0e8f109e 100644 --- a/compiler/src/dotty/tools/dotc/reporting/Diagnostic.scala +++ b/compiler/src/dotty/tools/dotc/reporting/Diagnostic.scala @@ -39,11 +39,10 @@ object Diagnostic: class Warning( msg: Message, pos: SourcePosition - ) extends Diagnostic(msg, pos, WARNING) { + ) extends Diagnostic(msg, pos, WARNING): def toError: Error = new Error(msg, pos).tap(e => if isVerbose then e.setVerbose()) def toInfo: Info = new Info(msg, pos).tap(e => if isVerbose then e.setVerbose()) def isSummarizedConditional(using Context): Boolean = false - } class Info( msg: Message, @@ -54,31 +53,27 @@ object Diagnostic: abstract class ConditionalWarning( msg: Message, pos: SourcePosition - ) extends Warning(msg, pos) { + ) extends Warning(msg, pos): def enablingOption(using Context): Setting[Boolean] override def isSummarizedConditional(using Context): Boolean = !enablingOption.value - } class FeatureWarning( msg: Message, pos: SourcePosition - ) extends ConditionalWarning(msg, pos) { + ) extends ConditionalWarning(msg, pos): def enablingOption(using Context): Setting[Boolean] = ctx.settings.feature - } class UncheckedWarning( msg: Message, pos: SourcePosition - ) extends ConditionalWarning(msg, pos) { + ) extends ConditionalWarning(msg, pos): def enablingOption(using Context): Setting[Boolean] = ctx.settings.unchecked - } class DeprecationWarning( msg: Message, pos: SourcePosition - ) extends ConditionalWarning(msg, pos) { + ) extends ConditionalWarning(msg, pos): def enablingOption(using Context): Setting[Boolean] = ctx.settings.deprecation - } class MigrationWarning( msg: Message, diff --git a/compiler/src/dotty/tools/dotc/reporting/HideNonSensicalMessages.scala 
b/compiler/src/dotty/tools/dotc/reporting/HideNonSensicalMessages.scala index 9b6a3c75ba5d..eae9e9eb4331 100644 --- a/compiler/src/dotty/tools/dotc/reporting/HideNonSensicalMessages.scala +++ b/compiler/src/dotty/tools/dotc/reporting/HideNonSensicalMessages.scala @@ -7,14 +7,12 @@ import core.Contexts._ /** * This trait implements `isHidden` so that we avoid reporting non-sensical messages. */ -trait HideNonSensicalMessages extends Reporter { +trait HideNonSensicalMessages extends Reporter: /** Hides non-sensical messages, unless we haven't reported any error yet or * `-Yshow-suppressed-errors` is set. */ override def isHidden(dia: Diagnostic)(using Context): Boolean = - super.isHidden(dia) || { + super.isHidden(dia) `||`: dia.msg.isNonSensical && hasErrors && // if there are no errors yet, report even if diagnostic is non-sensical !ctx.settings.YshowSuppressedErrors.value - } -} diff --git a/compiler/src/dotty/tools/dotc/reporting/Message.scala b/compiler/src/dotty/tools/dotc/reporting/Message.scala index a1fe6773c1d2..e336c53c76eb 100644 --- a/compiler/src/dotty/tools/dotc/reporting/Message.scala +++ b/compiler/src/dotty/tools/dotc/reporting/Message.scala @@ -79,12 +79,11 @@ object Message: * class symbol instead. This normalization avoids recording e.g. 
scala.List * and scala.collection.immutable.List as two different types */ - def followAlias(e1: Recorded): Recorded = e1 match { + def followAlias(e1: Recorded): Recorded = e1 match case e1: Symbol if e1.isAliasType => val underlying = e1.typeRef.underlyingClassRef(refinementOK = false).typeSymbol if (underlying.name == e1.name) underlying else e1 case _ => e1 - } val key = SeenKey(str, isType) val existing = seen(key) lazy val dealiased = followAlias(entry) @@ -95,21 +94,20 @@ object Message: alts = entry :: existing seen(key) = alts - val suffix = alts.length match { + val suffix = alts.length match case 1 => "" case n => n.toString.toCharArray.map { - case '0' => '⁰' - case '1' => '¹' - case '2' => '²' - case '3' => '³' - case '4' => '⁴' - case '5' => '⁵' - case '6' => '⁶' - case '7' => '⁷' - case '8' => '⁸' - case '9' => '⁹' - }.mkString - } + case '0' => '⁰' + case '1' => '¹' + case '2' => '²' + case '3' => '³' + case '4' => '⁴' + case '5' => '⁵' + case '6' => '⁶' + case '7' => '⁷' + case '8' => '⁸' + case '9' => '⁹' + }.mkString str + suffix end record @@ -118,23 +116,21 @@ object Message: def boundStr(bound: Type, default: ClassSymbol, cmp: String) = if (bound.isRef(default)) "" else i"$cmp $bound" - def boundsStr(bounds: TypeBounds): String = { + def boundsStr(bounds: TypeBounds): String = val lo = boundStr(bounds.lo, defn.NothingClass, ">:") val hi = boundStr(bounds.hi, defn.AnyClass, "<:") if (lo.isEmpty) hi else if (hi.isEmpty) lo else s"$lo and $hi" - } - def addendum(cat: String, info: Type): String = info match { + def addendum(cat: String, info: Type): String = info match case bounds @ TypeBounds(lo, hi) if bounds ne TypeBounds.empty => if (lo eq hi) i" which is an alias of $lo" else i" with $cat ${boundsStr(bounds)}" case _ => "" - } - entry match { + entry match case param: TypeParamRef => s"is a type variable${addendum("constraint", TypeComparer.bounds(param))}" case param: TermParamRef => @@ -148,41 +144,35 @@ object Message: s"is a 
${ctx.printer.kindString(sym)}${sym.showExtendedLocation}${addendum("bounds", info)}" case tp: SkolemType => s"is an unknown value of type ${tp.widen.show}" - } end explanation /** Produce a where clause with explanations for recorded iterms. */ def explanations(using Context): String = - def needsExplanation(entry: Recorded) = entry match { + def needsExplanation(entry: Recorded) = entry match case param: TypeParamRef => ctx.typerState.constraint.contains(param) case param: ParamRef => false case skolem: SkolemType => true case sym: Symbol => ctx.gadt.contains(sym) && ctx.gadt.fullBounds(sym) != TypeBounds.empty - } val toExplain: List[(String, Recorded)] = seen.toList.flatMap { kvs => - val res: List[(String, Recorded)] = kvs match { + val res: List[(String, Recorded)] = kvs match case (key, entry :: Nil) => if (needsExplanation(entry)) (key.str, entry) :: Nil else Nil case (key, entries) => - for (alt <- entries) yield { + for (alt <- entries) yield val tickedString = record(key.str, key.isType, alt) (tickedString, alt) - } - } res // help the inferrencer out }.sortBy(_._1) - def columnar(parts: List[(String, String)]): List[String] = { + def columnar(parts: List[(String, String)]): List[String] = lazy val maxLen = parts.map(_._1.length).max - parts.map { + parts.map: case (leader, trailer) => val variable = hl(leader) s"""$variable${" " * (maxLen - leader.length)} $trailer""" - } - } val explainParts = toExplain.map { case (str, entry) => (str, explanation(entry)) } val explainLines = columnar(explainParts) @@ -392,19 +382,17 @@ trait NoDisambiguation extends Message: withoutDisambiguation() /** The fallback `Message` containing no explanation and having no `kind` */ -final class NoExplanation(msgFn: Context ?=> String)(using Context) extends Message(ErrorMessageID.NoExplanationID) { +final class NoExplanation(msgFn: Context ?=> String)(using Context) extends Message(ErrorMessageID.NoExplanationID): def msg(using Context): String = msgFn def explain(using 
Context): String = "" val kind: MessageKind = MessageKind.NoKind override def toString(): String = msg -} /** The extractor for `NoExplanation` can be used to check whether any error * lacks an explanation */ -object NoExplanation { +object NoExplanation: def unapply(m: Message): Option[Message] = if (m.explanation == "") Some(m) else None -} diff --git a/compiler/src/dotty/tools/dotc/reporting/MessageRendering.scala b/compiler/src/dotty/tools/dotc/reporting/MessageRendering.scala index f53359fb8b19..0fa0ec19950f 100644 --- a/compiler/src/dotty/tools/dotc/reporting/MessageRendering.scala +++ b/compiler/src/dotty/tools/dotc/reporting/MessageRendering.scala @@ -17,7 +17,7 @@ import scala.annotation.switch import scala.collection.mutable -trait MessageRendering { +trait MessageRendering: import Highlight.* import Offsets.* @@ -39,27 +39,24 @@ trait MessageRendering { * * @return (lines before error, lines after error, line numbers offset) */ - private def sourceLines(pos: SourcePosition)(using Context, Level, Offset): (List[String], List[String], Int) = { + private def sourceLines(pos: SourcePosition)(using Context, Level, Offset): (List[String], List[String], Int) = assert(pos.exists && pos.source.file.exists) var maxLen = Int.MinValue - def render(offsetAndLine: (Int, String)): String = { + def render(offsetAndLine: (Int, String)): String = val (offset1, line) = offsetAndLine val lineNbr = (pos.source.offsetToLine(offset1) + 1).toString val prefix = String.format(s"%${offset - 2}s |", lineNbr) maxLen = math.max(maxLen, prefix.length) val lnum = hl(" " * math.max(0, maxLen - prefix.length - 1) + prefix) lnum + line.stripLineEnd - } - def linesFrom(arr: Array[Char]): List[String] = { - def pred(c: Char) = (c: @switch) match { + def linesFrom(arr: Array[Char]): List[String] = + def pred(c: Char) = (c: @switch) match case LF | CR | FF | SU => true case _ => false - } val (line, rest0) = arr.span(!pred(_)) val (_, rest) = rest0.span(pred) new String(line) :: { if 
(rest.isEmpty) Nil else linesFrom(rest) } - } val syntax = if (ctx.settings.color.value != "never") @@ -73,7 +70,6 @@ trait MessageRendering { after.zip(lines.drop(before.length)).map(render), maxLen ) - } /** Generate box containing the report title * @@ -92,14 +88,13 @@ trait MessageRendering { * | ^^^^^ * ``` */ - private def positionMarker(pos: SourcePosition)(using Context, Level, Offset): String = { + private def positionMarker(pos: SourcePosition)(using Context, Level, Offset): String = val padding = pos.startColumnPadding val carets = if (pos.startLine == pos.endLine) "^" * math.max(1, pos.endColumn - pos.startColumn) else "^" hl(s"$offsetBox$padding$carets") - } /** The horizontal line with the given offset * @@ -144,7 +139,7 @@ trait MessageRendering { * * @return aligned error message */ - private def errorMsg(pos: SourcePosition, msg: String)(using Context, Level, Offset): String = { + private def errorMsg(pos: SourcePosition, msg: String)(using Context, Level, Offset): String = val padding = msg.linesIterator.foldLeft(pos.startColumnPadding) { (pad, line) => val lineLength = stripColor(line).length val maxPad = math.max(0, ctx.settings.pageWidth.value - offset - lineLength) - offset @@ -156,7 +151,6 @@ trait MessageRendering { msg.linesIterator .map { line => offsetBox + (if line.isEmpty then "" else padding + line) } .mkString(EOL) - } /** The source file path, line and column numbers from the given SourcePosition */ protected def posFileStr(pos: SourcePosition): String = @@ -198,7 +192,7 @@ trait MessageRendering { end posStr /** Explanation rendered under "Explanation" header */ - def explanation(m: Message)(using Context): String = { + def explanation(m: Message)(using Context): String = val sb = new StringBuilder( s"""| |${Blue("Explanation").show} @@ -207,17 +201,15 @@ trait MessageRendering { sb.append(EOL).append(m.explanation) if (!m.explanation.endsWith(EOL)) sb.append(EOL) sb.toString - } private def appendFilterHelp(dia: Diagnostic, sb: 
mutable.StringBuilder): Unit = import dia._ val hasId = msg.errorId.errorNumber >= 0 - val category = dia match { + val category = dia match case _: UncheckedWarning => "unchecked" case _: DeprecationWarning => "deprecation" case _: FeatureWarning => "feature" case _ => "" - } if (hasId || category.nonEmpty) sb.append(EOL).append("Matching filters for @nowarn or -Wconf:") if (hasId) @@ -227,7 +219,7 @@ trait MessageRendering { sb.append(EOL).append(" - cat=").append(category) /** The whole message rendered from `msg` */ - def messageAndPos(dia: Diagnostic)(using Context): String = { + def messageAndPos(dia: Diagnostic)(using Context): String = import dia._ val pos1 = pos.nonInlined val inlineStack = inlinePosStack(pos).filter(_ != pos1) @@ -239,9 +231,9 @@ trait MessageRendering { val sb = mutable.StringBuilder() val posString = posStr(pos, msg, diagnosticLevel(dia)) if (posString.nonEmpty) sb.append(posString).append(EOL) - if (pos.exists) { + if (pos.exists) val pos1 = pos.nonInlined - if (pos1.exists && pos1.source.file.exists) { + if (pos1.exists && pos1.source.file.exists) val (srcBefore, srcAfter, offset) = sourceLines(pos1) val marker = positionMarker(pos1) val err = errorMsg(pos1, msg.message) @@ -258,9 +250,7 @@ trait MessageRendering { val marker = positionMarker(inlinedPos) sb.append(EOL).append((srcBefore ::: marker :: srcAfter).mkString(EOL)) sb.append(EOL).append(endBox) - } else sb.append(msg.message) - } else sb.append(msg.message) if (dia.isVerbose) appendFilterHelp(dia, sb) @@ -278,7 +268,6 @@ trait MessageRendering { sb.append(EOL).append(offsetBox).append(" longer explanation available when compiling with `-explain`") sb.toString - } private def hl(str: String)(using Context, Level): String = summon[Level].value match @@ -287,25 +276,22 @@ trait MessageRendering { case interfaces.Diagnostic.INFO => Blue(str).show private def diagnosticLevel(dia: Diagnostic): String = - dia match { + dia match case dia: FeatureWarning => "Feature Warning" case 
dia: DeprecationWarning => "Deprecation Warning" case dia: UncheckedWarning => "Unchecked Warning" case dia: MigrationWarning => "Migration Warning" case _ => dia.level match // Diagnostic isn't sealed (e.g. created in the REPL) so provide a fallback - case interfaces.Diagnostic.ERROR => "Error" - case interfaces.Diagnostic.WARNING => "Warning" - case interfaces.Diagnostic.INFO => "Info" - } + case interfaces.Diagnostic.ERROR => "Error" + case interfaces.Diagnostic.WARNING => "Warning" + case interfaces.Diagnostic.INFO => "Info" -} -private object Highlight { +private object Highlight: opaque type Level = Int extension (level: Level) def value: Int = level object Level: def apply(level: Int): Level = level -} /** Size of the left offset added by the box * @@ -316,9 +302,8 @@ private object Highlight { * ^^^ // size of this offset * ``` */ -private object Offsets { +private object Offsets: opaque type Offset = Int def offset(using o: Offset): Int = o object Offset: def apply(level: Int): Offset = level -} diff --git a/compiler/src/dotty/tools/dotc/reporting/Reporter.scala b/compiler/src/dotty/tools/dotc/reporting/Reporter.scala index f5aadac27296..58f01e8145eb 100644 --- a/compiler/src/dotty/tools/dotc/reporting/Reporter.scala +++ b/compiler/src/dotty/tools/dotc/reporting/Reporter.scala @@ -16,18 +16,16 @@ import scala.annotation.internal.sharable import scala.collection.mutable import core.Decorators.em -object Reporter { +object Reporter: /** Convert a SimpleReporter into a real Reporter */ def fromSimpleReporter(simple: interfaces.SimpleReporter): Reporter = - new Reporter with UniqueMessagePositions with HideNonSensicalMessages { + new Reporter with UniqueMessagePositions with HideNonSensicalMessages: override def doReport(dia: Diagnostic)(using Context): Unit = simple.report(dia) - } /** A reporter that ignores reports, and doesn't record errors */ - @sharable object NoReporter extends Reporter { + @sharable object NoReporter extends Reporter: def doReport(dia: 
Diagnostic)(using Context): Unit = () override def report(dia: Diagnostic)(using Context): Unit = () - } type ErrorHandler = (Diagnostic, Context) => Unit @@ -35,12 +33,12 @@ object Reporter { (mc, ctx) => ctx.reporter.report(mc)(using ctx) /** Show prompt if `-Xprompt` is passed as a flag to the compiler */ - def displayPrompt(reader: BufferedReader, writer: PrintWriter): Unit = { + def displayPrompt(reader: BufferedReader, writer: PrintWriter): Unit = writer.println() writer.print("a)bort, s)tack, r)esume: ") writer.flush() - if (reader != null) { - def loop(): Unit = reader.read match { + if (reader != null) + def loop(): Unit = reader.read match case 'a' | 'A' => new Throwable().printStackTrace(writer) System.exit(1) @@ -52,17 +50,13 @@ object Reporter { () case _ => loop() - } loop() - } - } -} /** * This interface provides methods to issue information, warning and * error messages. */ -abstract class Reporter extends interfaces.ReporterResult { +abstract class Reporter extends interfaces.ReporterResult: import Reporter._ /** Report a diagnostic */ @@ -74,21 +68,19 @@ abstract class Reporter extends interfaces.ReporterResult { */ private var _truncationOK: Boolean = true def truncationOK: Boolean = _truncationOK - def withoutTruncating[T](body: => T): T = { + def withoutTruncating[T](body: => T): T = val saved = _truncationOK _truncationOK = false try body finally _truncationOK = saved - } private var incompleteHandler: ErrorHandler = defaultIncompleteHandler - def withIncompleteHandler[T](handler: ErrorHandler)(op: => T): T = { + def withIncompleteHandler[T](handler: ErrorHandler)(op: => T): T = val saved = incompleteHandler incompleteHandler = handler try op finally incompleteHandler = saved - } private def isIncompleteChecking = incompleteHandler ne defaultIncompleteHandler @@ -122,11 +114,10 @@ abstract class Reporter extends interfaces.ReporterResult { /** Run `op` and return `true` if errors were reported by this reporter. 
*/ - def reportsErrorsFor(op: Context ?=> Unit)(using Context): Boolean = { + def reportsErrorsFor(op: Context ?=> Unit)(using Context): Boolean = val initial = errorCount op errorCount > initial - } private var reportedFeaturesUseSites = Set[Symbol]() @@ -156,7 +147,7 @@ abstract class Reporter extends interfaces.ReporterResult { if !isHidden(d) then // avoid isHidden test for summarized warnings so that message is not forced markReported(d) withMode(Mode.Printing)(doReport(d)) - d match { + d match case _: Warning => _warningCount += 1 case e: Error => errors = e :: errors @@ -165,7 +156,6 @@ abstract class Reporter extends interfaces.ReporterResult { ctx.base.errorsToBeReported = true case _: Info => // nothing to do here // match error if d is something else - } end issueUnconfigured def issueIfNotSuppressed(dia: Diagnostic)(using Context): Unit = @@ -173,11 +163,11 @@ abstract class Reporter extends interfaces.ReporterResult { import Action._ dia match case w: Warning => WConf.parsed.action(w) match - case Error => issueUnconfigured(w.toError) - case Warning => issueUnconfigured(w) - case Verbose => issueUnconfigured(w.setVerbose()) - case Info => issueUnconfigured(w.toInfo) - case Silent => + case Error => issueUnconfigured(w.toError) + case Warning => issueUnconfigured(w) + case Verbose => issueUnconfigured(w.setVerbose()) + case Info => issueUnconfigured(w.toInfo) + case Silent => case _ => issueUnconfigured(dia) // `ctx.run` can be null in test, also in the repl when parsing the first line. 
The parser runs early, the Run is @@ -206,14 +196,13 @@ abstract class Reporter extends interfaces.ReporterResult { incompleteHandler(dia, ctx) /** Summary of warnings and errors */ - def summary: String = { + def summary: String = val b = new mutable.ListBuffer[String] if (warningCount > 0) b += countString(warningCount, "warning") + " found" if (errorCount > 0) b += countString(errorCount, "error") + " found" b.mkString("\n") - } def summarizeUnreportedWarnings()(using Context): Unit = for (settingName, count) <- unreportedWarnings do @@ -222,17 +211,15 @@ abstract class Reporter extends interfaces.ReporterResult { report(Warning(msg, NoSourcePosition)) /** Print the summary of warnings and errors */ - def printSummary()(using Context): Unit = { + def printSummary()(using Context): Unit = val s = summary if (s != "") report(new Info(s, NoSourcePosition)) - } /** Returns a string meaning "n elements". */ - protected def countString(n: Int, elements: String): String = n match { + protected def countString(n: Int, elements: String): String = n match case 0 => s"no ${elements}s" case 1 => s"1 ${elements}" case _ => s"$n ${elements}s" - } /** Should this diagnostic not be reported at all? 
*/ def isHidden(dia: Diagnostic)(using Context): Boolean = @@ -265,4 +252,3 @@ abstract class Reporter extends interfaces.ReporterResult { /** If this reporter buffers messages, all buffered messages, otherwise Nil */ def pendingMessages(using Context): List[Diagnostic] = Nil -} diff --git a/compiler/src/dotty/tools/dotc/reporting/StoreReporter.scala b/compiler/src/dotty/tools/dotc/reporting/StoreReporter.scala index 9783a3208a60..144a0a88e3d3 100644 --- a/compiler/src/dotty/tools/dotc/reporting/StoreReporter.scala +++ b/compiler/src/dotty/tools/dotc/reporting/StoreReporter.scala @@ -17,15 +17,14 @@ import Diagnostic._ * - The reporter is not flushed and the message containers capture a * `Context` (about 4MB) */ -class StoreReporter(outer: Reporter | Null = Reporter.NoReporter, fromTyperState: Boolean = false) extends Reporter { +class StoreReporter(outer: Reporter | Null = Reporter.NoReporter, fromTyperState: Boolean = false) extends Reporter: protected var infos: mutable.ListBuffer[Diagnostic] | Null = null - def doReport(dia: Diagnostic)(using Context): Unit = { + def doReport(dia: Diagnostic)(using Context): Unit = typr.println(s">>>> StoredError: ${dia.message}") // !!! 
DEBUG if (infos == null) infos = new mutable.ListBuffer infos.uncheckedNN += dia - } override def hasUnreportedErrors: Boolean = outer != null && infos != null && infos.uncheckedNN.exists(_.isInstanceOf[Error]) @@ -48,4 +47,3 @@ class StoreReporter(outer: Reporter | Null = Reporter.NoReporter, fromTyperState override def report(dia: Diagnostic)(using Context): Unit = if fromTyperState then issueUnconfigured(dia) else super.report(dia) -} diff --git a/compiler/src/dotty/tools/dotc/reporting/ThrowingReporter.scala b/compiler/src/dotty/tools/dotc/reporting/ThrowingReporter.scala index 153212522541..5990ad611b14 100644 --- a/compiler/src/dotty/tools/dotc/reporting/ThrowingReporter.scala +++ b/compiler/src/dotty/tools/dotc/reporting/ThrowingReporter.scala @@ -9,12 +9,10 @@ import Diagnostic.Error * This class implements a Reporter that throws all errors as UnhandledError exceptions * and sends warnings and other info to the underlying reporter. */ -class ThrowingReporter(reportInfo: Reporter) extends Reporter { - def doReport(dia: Diagnostic)(using Context): Unit = dia match { +class ThrowingReporter(reportInfo: Reporter) extends Reporter: + def doReport(dia: Diagnostic)(using Context): Unit = dia match case dia: Error => throw UnhandledError(dia) case _ => reportInfo.doReport(dia) - } -} class UnhandledError(val diagnostic: Error) extends Exception: override def getMessage = diagnostic.message diff --git a/compiler/src/dotty/tools/dotc/reporting/UniqueMessagePositions.scala b/compiler/src/dotty/tools/dotc/reporting/UniqueMessagePositions.scala index 98fd7da3032a..9abffbf7acd6 100644 --- a/compiler/src/dotty/tools/dotc/reporting/UniqueMessagePositions.scala +++ b/compiler/src/dotty/tools/dotc/reporting/UniqueMessagePositions.scala @@ -8,7 +8,7 @@ import core.Contexts._ /** This trait implements `isHidden` so that multiple messages per position * are suppressed, unless they are of increasing severity. 
*/ -trait UniqueMessagePositions extends Reporter { +trait UniqueMessagePositions extends Reporter: private val positions = new mutable.HashMap[(SourceFile, Integer), Diagnostic] @@ -35,4 +35,3 @@ trait UniqueMessagePositions extends Reporter { case Some(dia1) if dia1.hides(dia) => case _ => positions((ctx.source, pos)) = dia super.markReported(dia) -} diff --git a/compiler/src/dotty/tools/dotc/reporting/WConf.scala b/compiler/src/dotty/tools/dotc/reporting/WConf.scala index af1a5c0f0f47..cf708dc6e40b 100644 --- a/compiler/src/dotty/tools/dotc/reporting/WConf.scala +++ b/compiler/src/dotty/tools/dotc/reporting/WConf.scala @@ -66,25 +66,25 @@ object WConf: def parseFilter(s: String): Either[String, MessageFilter] = s match case "any" => Right(Any) case Splitter(filter, conf) => filter match - case "msg" => regex(conf).map(MessagePattern.apply) - case "id" => conf match - case ErrorId(num) => - ErrorMessageID.fromErrorNumber(num.toInt) match - case Some(errId) if errId.isActive => Right(MessageID(errId)) - case Some(errId) => Left(s"E${num} is marked as inactive.") - case _ => Left(s"Unknown error message number: E${num}") - case _ => - Left(s"invalid error message id: $conf") - case "name" => - try Right(MessageID(ErrorMessageID.valueOf(conf + "ID"))) - catch case _: IllegalArgumentException => Left(s"unknown error message name: $conf") - - case "cat" => conf match - case "deprecation" => Right(Deprecated) - case "feature" => Right(Feature) - case "unchecked" => Right(Unchecked) - case _ => Left(s"unknown category: $conf") - case _ => Left(s"unknown filter: $filter") + case "msg" => regex(conf).map(MessagePattern.apply) + case "id" => conf match + case ErrorId(num) => + ErrorMessageID.fromErrorNumber(num.toInt) match + case Some(errId) if errId.isActive => Right(MessageID(errId)) + case Some(errId) => Left(s"E${num} is marked as inactive.") + case _ => Left(s"Unknown error message number: E${num}") + case _ => + Left(s"invalid error message id: $conf") + case "name" 
=> + try Right(MessageID(ErrorMessageID.valueOf(conf + "ID"))) + catch case _: IllegalArgumentException => Left(s"unknown error message name: $conf") + + case "cat" => conf match + case "deprecation" => Right(Deprecated) + case "feature" => Right(Feature) + case "unchecked" => Right(Unchecked) + case _ => Left(s"unknown category: $conf") + case _ => Left(s"unknown filter: $filter") case _ => Left(s"unknown filter: $s") def parsed(using Context): WConf = diff --git a/compiler/src/dotty/tools/dotc/reporting/messages.scala b/compiler/src/dotty/tools/dotc/reporting/messages.scala index d205b816214c..ab01518385d1 100644 --- a/compiler/src/dotty/tools/dotc/reporting/messages.scala +++ b/compiler/src/dotty/tools/dotc/reporting/messages.scala @@ -83,12 +83,11 @@ abstract class ReferenceMsg(errorId: ErrorMessageID)(using Context) extends Mess def kind = MessageKind.Reference abstract class EmptyCatchOrFinallyBlock(tryBody: untpd.Tree, errNo: ErrorMessageID)(using Context) -extends SyntaxMsg(errNo) { - def explain(using Context) = { - val tryString = tryBody match { +extends SyntaxMsg(errNo): + def explain(using Context) = + val tryString = tryBody match case Block(Nil, untpd.EmptyTree) => "{}" case _ => tryBody.show - } val code1 = s"""|import scala.util.control.NonFatal @@ -115,35 +114,30 @@ extends SyntaxMsg(errNo) { | |It is recommended to use the ${hl("NonFatal")} extractor to catch all exceptions as it |correctly handles transfer functions like ${hl("return")}.""" - } -} class EmptyCatchBlock(tryBody: untpd.Tree)(using Context) -extends EmptyCatchOrFinallyBlock(tryBody, EmptyCatchBlockID) { +extends EmptyCatchOrFinallyBlock(tryBody, EmptyCatchBlockID): def msg(using Context) = i"""|The ${hl("catch")} block does not contain a valid expression, try |adding a case like - ${hl("case e: Exception =>")} to the block""" -} class EmptyCatchAndFinallyBlock(tryBody: untpd.Tree)(using Context) -extends EmptyCatchOrFinallyBlock(tryBody, EmptyCatchAndFinallyBlockID) { +extends 
EmptyCatchOrFinallyBlock(tryBody, EmptyCatchAndFinallyBlockID): def msg(using Context) = i"""|A ${hl("try")} without ${hl("catch")} or ${hl("finally")} is equivalent to putting |its body in a block; no exceptions are handled.""" -} class DeprecatedWithOperator()(using Context) -extends SyntaxMsg(DeprecatedWithOperatorID) { +extends SyntaxMsg(DeprecatedWithOperatorID): def msg(using Context) = i"""${hl("with")} as a type operator has been deprecated; use ${hl("&")} instead""" def explain(using Context) = i"""|Dotty introduces intersection types - ${hl("&")} types. These replace the |use of the ${hl("with")} keyword. There are a few differences in |semantics between intersection types and using ${hl("with")}.""" -} class CaseClassMissingParamList(cdef: untpd.TypeDef)(using Context) -extends SyntaxMsg(CaseClassMissingParamListID) { +extends SyntaxMsg(CaseClassMissingParamListID): def msg(using Context) = i"""|A ${hl("case class")} must have at least one parameter list""" @@ -151,14 +145,13 @@ extends SyntaxMsg(CaseClassMissingParamListID) { i"""|${cdef.name} must have at least one parameter list, if you would rather |have a singleton representation of ${cdef.name}, use a "${hl("case object")}". 
|Or, add an explicit ${hl("()")} as a parameter list to ${cdef.name}.""" -} class AnonymousFunctionMissingParamType(param: untpd.ValDef, tree: untpd.Function, pt: Type) (using Context) -extends TypeMsg(AnonymousFunctionMissingParamTypeID) { - def msg(using Context) = { +extends TypeMsg(AnonymousFunctionMissingParamTypeID): + def msg(using Context) = val ofFun = if param.name.is(WildcardParamName) || (MethodType.syntheticParamNames(tree.args.length + 1) contains param.name) @@ -172,13 +165,11 @@ extends TypeMsg(AnonymousFunctionMissingParamTypeID) { i"""Missing parameter type | |I could not infer the type of the parameter ${param.name}$ofFun.$inferred""" - } def explain(using Context) = "" -} class WildcardOnTypeArgumentNotAllowedOnNew()(using Context) -extends SyntaxMsg(WildcardOnTypeArgumentNotAllowedOnNewID) { +extends SyntaxMsg(WildcardOnTypeArgumentNotAllowedOnNewID): def msg(using Context) = "Type argument must be fully defined" def explain(using Context) = val code1: String = @@ -205,24 +196,22 @@ extends SyntaxMsg(WildcardOnTypeArgumentNotAllowedOnNewID) { |You must complete all the type parameters, for instance: | |$code2 """ -} // Type Errors ------------------------------------------------------------ // class DuplicateBind(bind: untpd.Bind, tree: untpd.CaseDef)(using Context) -extends NamingMsg(DuplicateBindID) { +extends NamingMsg(DuplicateBindID): def msg(using Context) = i"duplicate pattern variable: ${bind.name}" - def explain(using Context) = { + def explain(using Context) = val pat = tree.pat.show val guard = tree.guard match case untpd.EmptyTree => "" case guard => s"if ${guard.show}" - val body = tree.body match { + val body = tree.body match case Block(Nil, untpd.EmptyTree) => "" case body => s" ${body.show}" - } val caseDef = s"case $pat$guard => $body" @@ -231,19 +220,15 @@ extends NamingMsg(DuplicateBindID) { |$caseDef | |${bind.name} is not unique. 
Rename one of the bound variables!""" - } -} class MissingIdent(tree: untpd.Ident, treeKind: String, val name: Name)(using Context) -extends NotFoundMsg(MissingIdentID) { +extends NotFoundMsg(MissingIdentID): def msg(using Context) = i"Not found: $treeKind$name" - def explain(using Context) = { + def explain(using Context) = i"""|The identifier for `$treeKind$name` is not bound, that is, |no declaration for this identifier can be found. |That can happen, for example, if `$name` or its declaration has either been |misspelt or if an import is missing.""" - } -} class TypeMismatch(found: Type, expected: Type, inTree: Option[untpd.Tree], addenda: => String*)(using Context) extends TypeMismatchMsg(found, expected)(TypeMismatchID): @@ -301,10 +286,10 @@ class TypeMismatch(found: Type, expected: Type, inTree: Option[untpd.Tree], adde end TypeMismatch class NotAMember(site: Type, val name: Name, selected: String, addendum: => String = "")(using Context) -extends NotFoundMsg(NotAMemberID), ShowMatchTrace(site) { +extends NotFoundMsg(NotAMemberID), ShowMatchTrace(site): //println(i"site = $site, decls = ${site.decls}, source = ${site.typeSymbol.sourceFile}") //DEBUG - def msg(using Context) = { + def msg(using Context) = import core.Flags._ val maxDist = 3 // maximal number of differences to be considered for a hint val missing = name.show @@ -371,16 +356,14 @@ extends NotFoundMsg(NotAMemberID), ShowMatchTrace(site) { case Nil => prefixEnumClause("") i"$selected $name is not a member of ${site.widen}$finalAddendum" - } def explain(using Context) = "" -} class EarlyDefinitionsNotSupported()(using Context) -extends SyntaxMsg(EarlyDefinitionsNotSupportedID) { +extends SyntaxMsg(EarlyDefinitionsNotSupportedID): def msg(using Context) = "Early definitions are not supported; use trait parameters instead" - def explain(using Context) = { + def explain(using Context) = val code1 = """|trait Logging { | val f: File @@ -418,14 +401,12 @@ extends 
SyntaxMsg(EarlyDefinitionsNotSupportedID) { | |$code2 |""" - } -} class TopLevelImplicitClass(cdef: untpd.TypeDef)(using Context) -extends SyntaxMsg(TopLevelImplicitClassID) { +extends SyntaxMsg(TopLevelImplicitClassID): def msg(using Context) = i"""An ${hl("implicit class")} may not be top-level""" - def explain(using Context) = { + def explain(using Context) = val TypeDef(name, impl @ Template(constr0, parents, self, _)) = cdef: @unchecked val exampleArgs = if(constr0.termParamss.isEmpty) "..." @@ -446,11 +427,9 @@ extends SyntaxMsg(TopLevelImplicitClassID) { | |// At the use site: |import Implicits.${cdef.name}""" - } -} class ImplicitCaseClass(cdef: untpd.TypeDef)(using Context) -extends SyntaxMsg(ImplicitCaseClassID) { +extends SyntaxMsg(ImplicitCaseClassID): def msg(using Context) = i"""A ${hl("case class")} may not be defined as ${hl("implicit")}""" def explain(using Context) = @@ -459,12 +438,11 @@ extends SyntaxMsg(ImplicitCaseClassID) { |implicit class ${cdef.name}... | |""" -} class ImplicitClassPrimaryConstructorArity()(using Context) -extends SyntaxMsg(ImplicitClassPrimaryConstructorArityID){ +extends SyntaxMsg(ImplicitClassPrimaryConstructorArityID): def msg(using Context) = "Implicit classes must accept exactly one primary constructor parameter" - def explain(using Context) = { + def explain(using Context) = val example = "implicit class RichDate(date: java.util.Date)" i"""Implicit classes may only take one non-implicit argument in their constructor. For example: | @@ -473,14 +451,12 @@ extends SyntaxMsg(ImplicitClassPrimaryConstructorArityID){ |While it’s possible to create an implicit class with more than one non-implicit argument, |such classes aren’t used during implicit lookup. 
|""" - } -} class ObjectMayNotHaveSelfType(mdef: untpd.ModuleDef)(using Context) -extends SyntaxMsg(ObjectMayNotHaveSelfTypeID) { +extends SyntaxMsg(ObjectMayNotHaveSelfTypeID): def msg(using Context) = i"""${hl("object")}s must not have a self ${hl("type")}""" - def explain(using Context) = { + def explain(using Context) = val untpd.ModuleDef(name, tmpl) = mdef val ValDef(_, selfTpt, _) = tmpl.self i"""|${hl("object")}s must not have a self ${hl("type")}: @@ -490,14 +466,12 @@ extends SyntaxMsg(ObjectMayNotHaveSelfTypeID) { | - Let the object extend a trait containing the self type: | | object $name extends ${selfTpt.show}""" - } -} class RepeatedModifier(modifier: String)(implicit ctx:Context) -extends SyntaxMsg(RepeatedModifierID) { +extends SyntaxMsg(RepeatedModifierID): def msg(using Context) = i"""Repeated modifier $modifier""" - def explain(using Context) = { + def explain(using Context) = val code1 = "private private val Origin = Point(0, 0)" val code2 = "private final val Origin = Point(0, 0)" i"""This happens when you accidentally specify the same modifier twice. @@ -511,13 +485,11 @@ extends SyntaxMsg(RepeatedModifierID) { |$code2 | |""" - } -} class InterpolatedStringError()(implicit ctx:Context) -extends SyntaxMsg(InterpolatedStringErrorID) { +extends SyntaxMsg(InterpolatedStringErrorID): def msg(using Context) = "Error in interpolated string: identifier or block expected" - def explain(using Context) = { + def explain(using Context) = val code1 = "s\"$new Point(0, 0)\"" val code2 = "s\"${new Point(0, 0)}\"" i"""|This usually happens when you forget to place your expressions inside curly braces. 
@@ -528,11 +500,9 @@ extends SyntaxMsg(InterpolatedStringErrorID) { | |$code2 |""" - } -} class UnboundPlaceholderParameter()(implicit ctx:Context) -extends SyntaxMsg(UnboundPlaceholderParameterID) { +extends SyntaxMsg(UnboundPlaceholderParameterID): def msg(using Context) = i"""Unbound placeholder parameter; incorrect use of ${hl("_")}""" def explain(using Context) = i"""|The ${hl("_")} placeholder syntax was used where it could not be bound. @@ -575,38 +545,33 @@ extends SyntaxMsg(UnboundPlaceholderParameterID) { | |${hl("trait A { this: B => ... ")} |""" -} class IllegalStartSimpleExpr(illegalToken: String)(using Context) -extends SyntaxMsg(IllegalStartSimpleExprID) { +extends SyntaxMsg(IllegalStartSimpleExprID): def msg(using Context) = i"expression expected but ${Red(illegalToken)} found" - def explain(using Context) = { + def explain(using Context) = i"""|An expression cannot start with ${Red(illegalToken)}.""" - } -} class MissingReturnType()(implicit ctx:Context) -extends SyntaxMsg(MissingReturnTypeID) { +extends SyntaxMsg(MissingReturnTypeID): def msg(using Context) = "Missing return type" def explain(using Context) = i"""|An abstract declaration must have a return type. For example: | |trait Shape: | ${hl("def area: Double")} // abstract declaration returning a Double""" -} class MissingReturnTypeWithReturnStatement(method: Symbol)(using Context) -extends SyntaxMsg(MissingReturnTypeWithReturnStatementID) { +extends SyntaxMsg(MissingReturnTypeWithReturnStatementID): def msg(using Context) = i"$method has a return statement; it needs a result type" def explain(using Context) = i"""|If a method contains a ${hl("return")} statement, it must have an |explicit return type. 
For example: | |${hl("def good: Int /* explicit return type */ = return 1")}""" -} class YieldOrDoExpectedInForComprehension()(using Context) -extends SyntaxMsg(YieldOrDoExpectedInForComprehensionID) { +extends SyntaxMsg(YieldOrDoExpectedInForComprehensionID): def msg(using Context) = i"${hl("yield")} or ${hl("do")} expected" def explain(using Context) = @@ -635,14 +600,13 @@ extends SyntaxMsg(YieldOrDoExpectedInForComprehensionID) { |${hl("for i <- 1 to 3 do println(i) // notice the 'do' keyword")} | |""" -} class ProperDefinitionNotFound()(using Context) -extends Message(ProperDefinitionNotFoundID) { +extends Message(ProperDefinitionNotFoundID): def kind = MessageKind.DocComment def msg(using Context) = i"""Proper definition was not found in ${hl("@usecase")}""" - def explain(using Context) = { + def explain(using Context) = val noUsecase = "def map[B, That](f: A => B)(implicit bf: CanBuildFrom[List[A], B, That]): That" @@ -673,11 +637,9 @@ extends Message(ProperDefinitionNotFoundID) { |When creating the docs, the signature of the method is substituted by the |usecase and the compiler makes sure that it is valid. Because of this, you're |only allowed to use ${hl("def")}s when defining usecases.""" - } -} class ByNameParameterNotSupported(tpe: untpd.Tree)(using Context) -extends SyntaxMsg(ByNameParameterNotSupportedID) { +extends SyntaxMsg(ByNameParameterNotSupportedID): def msg(using Context) = i"By-name parameter type ${tpe} not allowed here." 
def explain(using Context) = @@ -697,10 +659,9 @@ extends SyntaxMsg(ByNameParameterNotSupportedID) { |And the usage could be as such: |${hl("func(bool => // do something...)")} |""" -} class WrongNumberOfTypeArgs(fntpe: Type, expectedArgs: List[ParamInfo], actual: List[untpd.Tree])(using Context) -extends SyntaxMsg(WrongNumberOfTypeArgsID) { +extends SyntaxMsg(WrongNumberOfTypeArgsID): private val expectedCount = expectedArgs.length private val actualCount = actual.length @@ -720,7 +681,7 @@ extends SyntaxMsg(WrongNumberOfTypeArgsID) { |expected: $expectedArgString |actual: $actualArgString""" - def explain(using Context) = { + def explain(using Context) = val tooManyTypeParams = """|val tuple2: (Int, String) = (1, "one") |val list: List[(Int, String)] = List(tuple2)""".stripMargin @@ -737,13 +698,11 @@ extends SyntaxMsg(WrongNumberOfTypeArgsID) { else i"""|You have not supplied enough type parameters |If you specify one type parameter then you need to specify every type parameter.""" - } -} class IllegalVariableInPatternAlternative(name: Name)(using Context) -extends SyntaxMsg(IllegalVariableInPatternAlternativeID) { +extends SyntaxMsg(IllegalVariableInPatternAlternativeID): def msg(using Context) = i"Illegal variable $name in pattern alternative" - def explain(using Context) = { + def explain(using Context) = val varInAlternative = """|def g(pair: (Int,Int)): Int = pair match { | case (1, n) | (n, 1) => n @@ -765,13 +724,11 @@ extends SyntaxMsg(IllegalVariableInPatternAlternativeID) { |could be implemented by moving each alternative into a separate case: | |$fixedVarInAlternative""" - } -} class IdentifierExpected(identifier: String)(using Context) -extends SyntaxMsg(IdentifierExpectedID) { +extends SyntaxMsg(IdentifierExpectedID): def msg(using Context) = "identifier expected" - def explain(using Context) = { + def explain(using Context) = val wrongIdentifier = i"def foo: $identifier = {...}" val validIdentifier = i"def foo = {...}" i"""|An identifier expected, 
but $identifier found. This could be because @@ -785,11 +742,9 @@ extends SyntaxMsg(IdentifierExpectedID) { |$validIdentifier | |""" - } -} class AuxConstructorNeedsNonImplicitParameter()(implicit ctx:Context) -extends SyntaxMsg(AuxConstructorNeedsNonImplicitParameterID) { +extends SyntaxMsg(AuxConstructorNeedsNonImplicitParameterID): def msg(using Context) = "Auxiliary constructor needs non-implicit parameter list" def explain(using Context) = i"""|Only the primary constructor is allowed an ${hl("implicit")} parameter list; @@ -801,10 +756,9 @@ extends SyntaxMsg(AuxConstructorNeedsNonImplicitParameterID) { | - Forgotten parenthesis on ${hl("this")} (${hl("def this() = { ... }")}) | - Auxiliary constructors specify the implicit value |""" -} class IllegalLiteral()(using Context) -extends SyntaxMsg(IllegalLiteralID) { +extends SyntaxMsg(IllegalLiteralID): def msg(using Context) = "Illegal literal" def explain(using Context) = i"""|Available literals can be divided into several groups: @@ -815,7 +769,6 @@ extends SyntaxMsg(IllegalLiteralID) { | - String Literals: "Hello, World!" 
| - null |""" -} class LossyWideningConstantConversion(sourceType: Type, targetType: Type)(using Context) extends Message(LossyWideningConstantConversionID): @@ -825,7 +778,7 @@ extends Message(LossyWideningConstantConversionID): def explain(using Context) = "" class PatternMatchExhaustivity(uncoveredFn: => String, hasMore: Boolean)(using Context) -extends Message(PatternMatchExhaustivityID) { +extends Message(PatternMatchExhaustivityID): def kind = MessageKind.PatternMatchExhaustivity lazy val uncovered = uncoveredFn def msg(using Context) = @@ -842,10 +795,9 @@ extends Message(PatternMatchExhaustivityID) { | - If an extractor always return ${hl("Some(...)")}, write ${hl("Some[X]")} for its return type | - Add a ${hl("case _ => ...")} at the end to match all remaining cases |""" -} class UncheckedTypePattern(msgFn: => String)(using Context) - extends PatternMatchMsg(UncheckedTypePatternID) { + extends PatternMatchMsg(UncheckedTypePatternID): def msg(using Context) = msgFn def explain(using Context) = i"""|Type arguments and type refinements are erased during compile time, thus it's @@ -853,23 +805,20 @@ class UncheckedTypePattern(msgFn: => String)(using Context) | |You can either replace the type arguments by ${hl("_")} or use `@unchecked`. 
|""" -} class MatchCaseUnreachable()(using Context) -extends Message(MatchCaseUnreachableID) { +extends Message(MatchCaseUnreachableID): def kind = MessageKind.MatchCaseUnreachable def msg(using Context) = "Unreachable case" def explain(using Context) = "" -} class MatchCaseOnlyNullWarning()(using Context) -extends PatternMatchMsg(MatchCaseOnlyNullWarningID) { +extends PatternMatchMsg(MatchCaseOnlyNullWarningID): def msg(using Context) = i"""Unreachable case except for ${hl("null")} (if this is intentional, consider writing ${hl("case null =>")} instead).""" def explain(using Context) = "" -} class MatchableWarning(tp: Type, pattern: Boolean)(using Context) -extends TypeMsg(MatchableWarningID) { +extends TypeMsg(MatchableWarningID): def msg(using Context) = val kind = if pattern then "pattern selector" else "value" i"""${kind} should be an instance of Matchable,, @@ -892,12 +841,11 @@ extends TypeMsg(MatchableWarningID) { else i"""The value can be converted to a `Matchable` by appending `.asMatchable`. |`asMatchable` needs to be imported from scala.compiletime.""" -} class SeqWildcardPatternPos()(using Context) -extends SyntaxMsg(SeqWildcardPatternPosID) { +extends SyntaxMsg(SeqWildcardPatternPosID): def msg(using Context) = i"""${hl("*")} can be used only for last argument""" - def explain(using Context) = { + def explain(using Context) = val code = """def sumOfTheFirstTwo(list: List[Int]): Int = list match { | case List(first, second, x*) => first + second @@ -914,13 +862,11 @@ extends SyntaxMsg(SeqWildcardPatternPosID) { |${hl("sumOfTheFirstTwo(List(1, 2, 10))")} | |would give 3 as a result""" - } -} class IllegalStartOfSimplePattern()(using Context) -extends SyntaxMsg(IllegalStartOfSimplePatternID) { +extends SyntaxMsg(IllegalStartOfSimplePatternID): def msg(using Context) = "pattern expected" - def explain(using Context) = { + def explain(using Context) = val sipCode = """def f(x: Int, y: Int) = x match { | case `y` => ... 
@@ -993,17 +939,14 @@ extends SyntaxMsg(IllegalStartOfSimplePatternID) { | | ${hl("def unapplySeq[A](x: List[A]): Some[List[A]]")} |""" - } -} class PkgDuplicateSymbol(existing: Symbol)(using Context) -extends NamingMsg(PkgDuplicateSymbolID) { +extends NamingMsg(PkgDuplicateSymbolID): def msg(using Context) = i"Trying to define package with same name as $existing" def explain(using Context) = "" -} class ExistentialTypesNoLongerSupported()(using Context) -extends SyntaxMsg(ExistentialTypesNoLongerSupportedID) { +extends SyntaxMsg(ExistentialTypesNoLongerSupportedID): def msg(using Context) = i"""|Existential types are no longer supported - |use a wildcard or dependent type instead""" @@ -1022,10 +965,9 @@ extends SyntaxMsg(ExistentialTypesNoLongerSupportedID) { | |${hl("List[?]")} |""" -} class UnboundWildcardType()(using Context) -extends SyntaxMsg(UnboundWildcardTypeID) { +extends SyntaxMsg(UnboundWildcardTypeID): def msg(using Context) = "Unbound wildcard type" def explain(using Context) = i"""|The wildcard type syntax (${hl("_")}) was used where it could not be bound. @@ -1066,10 +1008,9 @@ extends SyntaxMsg(UnboundWildcardTypeID) { | Use: | ${hl("val foo: Int = 3")} |""" -} class OverridesNothing(member: Symbol)(using Context) -extends DeclarationMsg(OverridesNothingID) { +extends DeclarationMsg(OverridesNothingID): def msg(using Context) = i"""${member} overrides nothing""" def explain(using Context) = @@ -1077,10 +1018,9 @@ extends DeclarationMsg(OverridesNothingID) { |class of ${member.owner} to override it. Did you misspell it? |Are you extending the right classes? 
|""" -} class OverridesNothingButNameExists(member: Symbol, existing: List[Denotations.SingleDenotation])(using Context) -extends DeclarationMsg(OverridesNothingButNameExistsID) { +extends DeclarationMsg(OverridesNothingButNameExistsID): def msg(using Context) = val what = if !existing.exists(_.symbol.hasTargetName(member.targetName)) @@ -1098,7 +1038,6 @@ extends DeclarationMsg(OverridesNothingButNameExistsID) { |named ${member.name}: | ${existingDecl} |""" -} class OverrideError( core: Context ?=> String, base: Type, @@ -1113,7 +1052,7 @@ extends DeclarationMsg(OverrideErrorID), NoDisambiguation: i"""| |(Note that ${err.infoStringWithLocation(other, base)} is abstract, |and is therefore overridden by concrete ${err.infoStringWithLocation(member, base)})""" - else "" + else "" i"""error overriding ${err.infoStringWithLocation(other, base)}; | ${err.infoString(member, base, showLocation = member.owner != base.typeSymbol)} $core$addendum""" override def canExplain = @@ -1122,7 +1061,7 @@ extends DeclarationMsg(OverrideErrorID), NoDisambiguation: if canExplain then err.whyNoMatchStr(memberTp, otherTp) else "" class ForwardReferenceExtendsOverDefinition(value: Symbol, definition: Symbol)(using Context) -extends ReferenceMsg(ForwardReferenceExtendsOverDefinitionID) { +extends ReferenceMsg(ForwardReferenceExtendsOverDefinitionID): def msg(using Context) = i"${definition.name} is a forward reference extending over the definition of ${value.name}" def explain(using Context) = @@ -1137,10 +1076,9 @@ extends ReferenceMsg(ForwardReferenceExtendsOverDefinitionID) { |the declaration of ${definition.name} and its use, |or define ${value.name} as lazy. 
|""" -} class ExpectedTokenButFound(expected: Token, found: Token)(using Context) -extends SyntaxMsg(ExpectedTokenButFoundID) { +extends SyntaxMsg(ExpectedTokenButFoundID): private def foundText = Tokens.showToken(found) @@ -1156,10 +1094,9 @@ extends SyntaxMsg(ExpectedTokenButFoundID) { |If you want to use $foundText as identifier, you may put it in backticks: `${Tokens.tokenString(found)}`.""".stripMargin else "" -} class MixedLeftAndRightAssociativeOps(op1: Name, op2: Name, op2LeftAssoc: Boolean)(using Context) -extends SyntaxMsg(MixedLeftAndRightAssociativeOpsID) { +extends SyntaxMsg(MixedLeftAndRightAssociativeOpsID): def msg(using Context) = val op1Asso: String = if (op2LeftAssoc) "which is right-associative" else "which is left-associative" val op2Asso: String = if (op2LeftAssoc) "which is left-associative" else "which is right-associative" @@ -1190,10 +1127,9 @@ extends SyntaxMsg(MixedLeftAndRightAssociativeOpsID) { | (all other special characters) |Operators starting with a letter have lowest precedence, followed by operators starting with `|`, etc. |""".stripMargin -} class CantInstantiateAbstractClassOrTrait(cls: Symbol, isTrait: Boolean)(using Context) -extends TypeMsg(CantInstantiateAbstractClassOrTraitID) { +extends TypeMsg(CantInstantiateAbstractClassOrTraitID): private val traitOrAbstract = if (isTrait) "a trait" else "abstract" def msg(using Context) = i"""${cls.name} is ${traitOrAbstract}; it cannot be instantiated""" def explain(using Context) = @@ -1208,7 +1144,6 @@ extends TypeMsg(CantInstantiateAbstractClassOrTraitID) { | |You need to implement any abstract members in both cases. 
|""" -} class UnreducibleApplication(tycon: Type)(using Context) extends TypeMsg(UnreducibleApplicationID): def msg(using Context) = i"unreducible application of higher-kinded type $tycon to wildcard arguments" @@ -1218,7 +1153,7 @@ class UnreducibleApplication(tycon: Type)(using Context) extends TypeMsg(Unreduc |supported in Scala 3.""" class OverloadedOrRecursiveMethodNeedsResultType(cycleSym: Symbol)(using Context) -extends CyclicMsg(OverloadedOrRecursiveMethodNeedsResultTypeID) { +extends CyclicMsg(OverloadedOrRecursiveMethodNeedsResultTypeID): def msg(using Context) = i"""Overloaded or recursive $cycleSym needs return type""" def explain(using Context) = i"""Case 1: $cycleSym is overloaded @@ -1229,18 +1164,16 @@ extends CyclicMsg(OverloadedOrRecursiveMethodNeedsResultTypeID) { |If $cycleSym calls itself on any path (even through mutual recursion), you need to specify the return type |of $cycleSym or of a definition it's mutually recursive with. |""" -} class RecursiveValueNeedsResultType(cycleSym: Symbol)(using Context) -extends CyclicMsg(RecursiveValueNeedsResultTypeID) { +extends CyclicMsg(RecursiveValueNeedsResultTypeID): def msg(using Context) = i"""Recursive $cycleSym needs type""" def explain(using Context) = i"""The definition of $cycleSym is recursive and you need to specify its type. |""" -} class CyclicReferenceInvolving(denot: SymDenotation)(using Context) -extends CyclicMsg(CyclicReferenceInvolvingID) { +extends CyclicMsg(CyclicReferenceInvolvingID): def msg(using Context) = val where = if denot.exists then s" involving $denot" else "" i"Cyclic reference$where" @@ -1249,10 +1182,9 @@ extends CyclicMsg(CyclicReferenceInvolvingID) { |compiler to decide upon ${denot.name}'s type. |To avoid this error, try giving ${denot.name} an explicit type. 
|""" -} class CyclicReferenceInvolvingImplicit(cycleSym: Symbol)(using Context) -extends CyclicMsg(CyclicReferenceInvolvingImplicitID) { +extends CyclicMsg(CyclicReferenceInvolvingImplicitID): def msg(using Context) = i"""Cyclic reference involving implicit $cycleSym""" def explain(using Context) = i"""|$cycleSym is declared as part of a cycle which makes it impossible for the @@ -1260,7 +1192,6 @@ extends CyclicMsg(CyclicReferenceInvolvingImplicitID) { |This might happen when the right hand-side of $cycleSym's definition involves an implicit search. |To avoid this error, try giving ${cycleSym.name} an explicit type. |""" -} class SkolemInInferred(tree: tpd.Tree, pt: Type, argument: tpd.Tree)(using Context) extends TypeMsg(SkolemInInferredID): @@ -1282,7 +1213,7 @@ extends TypeMsg(SkolemInInferredID): |relying on implicit search at this point.""" class SuperQualMustBeParent(qual: untpd.Ident, cls: ClassSymbol)(using Context) -extends ReferenceMsg(SuperQualMustBeParentID) { +extends ReferenceMsg(SuperQualMustBeParentID): def msg(using Context) = i"""|$qual does not name a parent of $cls""" def explain(using Context) = val parents: Seq[String] = (cls.info.parents map (_.typeSymbol.name.show)).sorted @@ -1292,16 +1223,14 @@ extends ReferenceMsg(SuperQualMustBeParentID) { |In this case, the parents of $cls are: |${parents.mkString(" - ", "\n - ", "")} |""" -} class VarArgsParamMustComeLast()(using Context) -extends SyntaxMsg(VarArgsParamMustComeLastID) { +extends SyntaxMsg(VarArgsParamMustComeLastID): def msg(using Context) = i"""${hl("varargs")} parameter must come last""" def explain(using Context) = i"""|The ${hl("varargs")} field must be the last field in the method signature. |Attempting to define a field in a method signature after a ${hl("varargs")} field is an error. 
|""" -} import typer.Typer.BindingPrec @@ -1337,26 +1266,24 @@ class ConstrProxyShadows(proxy: TermRef, shadowed: Type, shadowedIsApply: Boolea end ConstrProxyShadows class AmbiguousReference(name: Name, newPrec: BindingPrec, prevPrec: BindingPrec, prevCtx: Context)(using Context) - extends ReferenceMsg(AmbiguousReferenceID), NoDisambiguation { + extends ReferenceMsg(AmbiguousReferenceID), NoDisambiguation: /** A string which explains how something was bound; Depending on `prec` this is either * imported by * or defined in */ - private def bindingString(prec: BindingPrec, whereFound: Context, qualifier: String = "")(using Context) = { - val howVisible = prec match { + private def bindingString(prec: BindingPrec, whereFound: Context, qualifier: String = "")(using Context) = + val howVisible = prec match case BindingPrec.Definition => "defined" case BindingPrec.Inheritance => "inherited" case BindingPrec.NamedImport => "imported by name" case BindingPrec.WildImport => "imported" case BindingPrec.PackageClause => "found" case BindingPrec.NothingBound => assert(false) - } - if (prec.isImportPrec) { + if (prec.isImportPrec) i"""$howVisible$qualifier by ${whereFound.importInfo}""" - } else + else i"""$howVisible$qualifier in ${whereFound.owner}""" - } def msg(using Context) = i"""|Reference to $name is ambiguous. 
@@ -1386,10 +1313,9 @@ class AmbiguousReference(name: Name, newPrec: BindingPrec, prevPrec: BindingPrec | - When importing, you can avoid naming conflicts by renaming: | ${hl("import")} scala.{$name => ${name.show}Tick} |""" -} class MethodDoesNotTakeParameters(tree: tpd.Tree)(using Context) -extends TypeMsg(MethodDoesNotTakeParametersId) { +extends TypeMsg(MethodDoesNotTakeParametersId): def methodSymbol(using Context): Symbol = def recur(t: tpd.Tree): Symbol = val sym = tpd.methPart(t).symbol @@ -1400,27 +1326,24 @@ extends TypeMsg(MethodDoesNotTakeParametersId) { else sym recur(tree) - def msg(using Context) = { + def msg(using Context) = val more = if (tree.isInstanceOf[tpd.Apply]) " more" else "" val meth = methodSymbol val methStr = if (meth.exists) meth.showLocated else "expression" i"$methStr does not take$more parameters" - } - def explain(using Context) = { + def explain(using Context) = val isNullary = methodSymbol.info.isInstanceOf[ExprType] val addendum = if (isNullary) "\nNullary methods may not be called with parenthesis" else "" "You have specified more parameter lists than defined in the method definition(s)." + addendum - } -} class AmbiguousOverload(tree: tpd.Tree, val alternatives: List[SingleDenotation], pt: Type, addendum: String = "")( implicit ctx: Context) -extends ReferenceMsg(AmbiguousOverloadID), NoDisambiguation { +extends ReferenceMsg(AmbiguousOverloadID), NoDisambiguation: private def all = if (alternatives.length == 2) "both" else "all" def msg(using Context) = i"""|Ambiguous overload. 
The ${err.overloadedAltsStr(alternatives)} @@ -1432,7 +1355,6 @@ extends ReferenceMsg(AmbiguousOverloadID), NoDisambiguation { |- assigning it to a value with a specified type, or |- adding a type ascription as in ${hl("instance.myMethod: String => Int")} |""" -} class AmbiguousExtensionMethod(tree: untpd.Tree, expansion1: tpd.Tree, expansion2: tpd.Tree)(using Context) extends ReferenceMsg(AmbiguousExtensionMethodID), NoDisambiguation: @@ -1444,7 +1366,7 @@ class AmbiguousExtensionMethod(tree: untpd.Tree, expansion1: tpd.Tree, expansion def explain(using Context) = "" class ReassignmentToVal(name: Name)(using Context) - extends TypeMsg(ReassignmentToValID) { + extends TypeMsg(ReassignmentToValID): def msg(using Context) = i"""Reassignment to val $name""" def explain(using Context) = i"""|You can not assign a new value to $name as values can't be changed. @@ -1454,10 +1376,9 @@ class ReassignmentToVal(name: Name)(using Context) |variable | ${hl("var")} $name ${hl("=")} ... |""" -} class TypeDoesNotTakeParameters(tpe: Type, params: List[untpd.Tree])(using Context) - extends TypeMsg(TypeDoesNotTakeParametersID) { + extends TypeMsg(TypeDoesNotTakeParametersID): private def fboundsAddendum(using Context) = if tpe.typeSymbol.isAllOf(Provisional | TypeParam) then "\n(Note that F-bounds of type parameters may not be type lambdas)" @@ -1470,10 +1391,9 @@ class TypeDoesNotTakeParameters(tpe: Type, params: List[untpd.Tree])(using Conte i"""You specified ${NoColor(ps)} for $tpe, which is not |declared to take any. 
|""" -} class VarValParametersMayNotBeCallByName(name: TermName, mutable: Boolean)(using Context) - extends SyntaxMsg(VarValParametersMayNotBeCallByNameID) { + extends SyntaxMsg(VarValParametersMayNotBeCallByNameID): def varOrVal = if mutable then hl("var") else hl("val") def msg(using Context) = s"$varOrVal parameters may not be call-by-name" def explain(using Context) = @@ -1484,24 +1404,21 @@ class VarValParametersMayNotBeCallByName(name: TermName, mutable: Boolean)(using | ${s" def $name() = ${name}Tick"} | ${hl("}")} |""" -} class MissingTypeParameterFor(tpe: Type)(using Context) - extends SyntaxMsg(MissingTypeParameterForID) { + extends SyntaxMsg(MissingTypeParameterForID): def msg(using Context) = if tpe.derivesFrom(defn.AnyKindClass) then i"$tpe cannot be used as a value type" else i"Missing type parameter for $tpe" def explain(using Context) = "" -} class MissingTypeParameterInTypeApp(tpe: Type)(using Context) - extends TypeMsg(MissingTypeParameterInTypeAppID) { + extends TypeMsg(MissingTypeParameterInTypeAppID): def numParams = tpe.typeParams.length def parameters = if (numParams == 1) "parameter" else "parameters" def msg(using Context) = i"Missing type $parameters for $tpe" def explain(using Context) = i"A fully applied type is expected but $tpe takes $numParams $parameters" -} class MissingArgument(pname: Name, methString: String)(using Context) extends TypeMsg(MissingArgumentID): @@ -1511,14 +1428,12 @@ class MissingArgument(pname: Name, methString: String)(using Context) def explain(using Context) = "" class MissingArgumentList(method: String, sym: Symbol)(using Context) - extends TypeMsg(MissingArgumentListID) { + extends TypeMsg(MissingArgumentListID): def msg(using Context) = val symDcl = if sym.exists then "\n\n " + hl(sym.showDcl(using ctx.withoutColors)) else "" i"missing argument list for $method$symDcl" - def explain(using Context) = { + def explain(using Context) = i"""Unapplied methods are only converted to functions when a function type is 
expected.""" - } -} class DoesNotConformToBound(tpe: Type, which: String, bound: Type)(using Context) extends TypeMismatchMsg( @@ -1537,16 +1452,14 @@ class DoesNotConformToBound(tpe: Type, which: String, bound: Type)(using Context class DoesNotConformToSelfType(category: String, selfType: Type, cls: Symbol, otherSelf: Type, relation: String, other: Symbol)( implicit ctx: Context) - extends TypeMismatchMsg(selfType, otherSelf)(DoesNotConformToSelfTypeID) { + extends TypeMismatchMsg(selfType, otherSelf)(DoesNotConformToSelfTypeID): def msg(using Context) = i"""$category: self type $selfType of $cls does not conform to self type $otherSelf |of $relation $other""" -} class DoesNotConformToSelfTypeCantBeInstantiated(tp: Type, selfType: Type)( implicit ctx: Context) - extends TypeMismatchMsg(tp, selfType)(DoesNotConformToSelfTypeCantBeInstantiatedID) { + extends TypeMismatchMsg(tp, selfType)(DoesNotConformToSelfTypeCantBeInstantiatedID): def msg(using Context) = i"""$tp does not conform to its self type $selfType; cannot be instantiated""" -} class IllegalParameterInit(found: Type, expected: Type, param: Symbol, cls: Symbol)(using Context) extends TypeMismatchMsg(found, expected)(IllegalParameterInitID): @@ -1558,178 +1471,152 @@ class IllegalParameterInit(found: Type, expected: Type, param: Symbol, cls: Symb class AbstractMemberMayNotHaveModifier(sym: Symbol, flag: FlagSet)( implicit ctx: Context) - extends SyntaxMsg(AbstractMemberMayNotHaveModifierID) { + extends SyntaxMsg(AbstractMemberMayNotHaveModifierID): def msg(using Context) = i"""${hl("abstract")} $sym may not have `${flag.flagsString}` modifier""" def explain(using Context) = "" -} class TypesAndTraitsCantBeImplicit()(using Context) - extends SyntaxMsg(TypesAndTraitsCantBeImplicitID) { + extends SyntaxMsg(TypesAndTraitsCantBeImplicitID): def msg(using Context) = i"""${hl("implicit")} modifier cannot be used for types or traits""" def explain(using Context) = "" -} class OnlyClassesCanBeAbstract(sym: Symbol)( 
implicit ctx: Context) - extends SyntaxMsg(OnlyClassesCanBeAbstractID) { + extends SyntaxMsg(OnlyClassesCanBeAbstractID): def explain(using Context) = "" def msg(using Context) = i"""${hl("abstract")} modifier can be used only for classes; it should be omitted for abstract members""" -} class AbstractOverrideOnlyInTraits(sym: Symbol)( implicit ctx: Context) - extends SyntaxMsg(AbstractOverrideOnlyInTraitsID) { + extends SyntaxMsg(AbstractOverrideOnlyInTraitsID): def msg(using Context) = i"""${hl("abstract override")} modifier only allowed for members of traits""" def explain(using Context) = "" -} class TraitsMayNotBeFinal(sym: Symbol)( implicit ctx: Context) - extends SyntaxMsg(TraitsMayNotBeFinalID) { + extends SyntaxMsg(TraitsMayNotBeFinalID): def msg(using Context) = i"""$sym may not be ${hl("final")}""" def explain(using Context) = "A trait can never be final since it is abstract and must be extended to be useful." -} class NativeMembersMayNotHaveImplementation(sym: Symbol)( implicit ctx: Context) - extends SyntaxMsg(NativeMembersMayNotHaveImplementationID) { + extends SyntaxMsg(NativeMembersMayNotHaveImplementationID): def msg(using Context) = i"""${hl("@native")} members may not have an implementation""" def explain(using Context) = "" -} class TraitMayNotDefineNativeMethod(sym: Symbol)( implicit ctx: Context) - extends SyntaxMsg(TraitMayNotDefineNativeMethodID) { + extends SyntaxMsg(TraitMayNotDefineNativeMethodID): def msg(using Context) = i"""A trait cannot define a ${hl("@native")} method.""" def explain(using Context) = "" -} class OnlyClassesCanHaveDeclaredButUndefinedMembers(sym: Symbol)( implicit ctx: Context) - extends SyntaxMsg(OnlyClassesCanHaveDeclaredButUndefinedMembersID) { + extends SyntaxMsg(OnlyClassesCanHaveDeclaredButUndefinedMembersID): def msg(using Context) = i"""Declaration of $sym not allowed here: only classes can have declared but undefined members""" def explain(using Context) = if sym.is(Mutable) then "Note that variables need to 
be initialized to be defined." else "" -} class CannotExtendAnyVal(sym: Symbol)(using Context) - extends SyntaxMsg(CannotExtendAnyValID) { + extends SyntaxMsg(CannotExtendAnyValID): def msg(using Context) = i"""$sym cannot extend ${hl("AnyVal")}""" def explain(using Context) = i"""Only classes (not traits) are allowed to extend ${hl("AnyVal")}, but traits may extend |${hl("Any")} to become ${Green("\"universal traits\"")} which may only have ${hl("def")} members. |Universal traits can be mixed into classes that extend ${hl("AnyVal")}. |""" -} class CannotExtendJavaEnum(sym: Symbol)(using Context) - extends SyntaxMsg(CannotExtendJavaEnumID) { + extends SyntaxMsg(CannotExtendJavaEnumID): def msg(using Context) = i"""$sym cannot extend ${hl("java.lang.Enum")}: only enums defined with the ${hl("enum")} syntax can""" def explain(using Context) = "" - } class CannotExtendContextFunction(sym: Symbol)(using Context) - extends SyntaxMsg(CannotExtendFunctionID) { + extends SyntaxMsg(CannotExtendFunctionID): def msg(using Context) = i"""$sym cannot extend a context function class""" def explain(using Context) = "" - } class JavaEnumParentArgs(parent: Type)(using Context) - extends TypeMsg(JavaEnumParentArgsID) { + extends TypeMsg(JavaEnumParentArgsID): def msg(using Context) = i"""not enough arguments for constructor Enum: ${hl("(name: String, ordinal: Int)")}: ${hl(parent.show)}""" def explain(using Context) = "" - } class CannotHaveSameNameAs(sym: Symbol, cls: Symbol, reason: CannotHaveSameNameAs.Reason)(using Context) - extends NamingMsg(CannotHaveSameNameAsID) { + extends NamingMsg(CannotHaveSameNameAsID): import CannotHaveSameNameAs._ - def reasonMessage(using Context): String = reason match { + def reasonMessage(using Context): String = reason match case CannotBeOverridden => "class definitions cannot be overridden" case DefinedInSelf(self) => s"""cannot define ${sym.showKind} member with the same name as a ${cls.showKind} member in self reference ${self.name}. 
|(Note: this can be resolved by using another name) |""".stripMargin - } def msg(using Context) = i"""$sym cannot have the same name as ${cls.showLocated} -- """ + reasonMessage def explain(using Context) = "" -} -object CannotHaveSameNameAs { +object CannotHaveSameNameAs: sealed trait Reason case object CannotBeOverridden extends Reason case class DefinedInSelf(self: tpd.ValDef) extends Reason -} class ValueClassesMayNotDefineInner(valueClass: Symbol, inner: Symbol)(using Context) - extends SyntaxMsg(ValueClassesMayNotDefineInnerID) { + extends SyntaxMsg(ValueClassesMayNotDefineInnerID): def msg(using Context) = i"""Value classes may not define an inner class""" def explain(using Context) = "" -} class ValueClassesMayNotDefineNonParameterField(valueClass: Symbol, field: Symbol)(using Context) - extends SyntaxMsg(ValueClassesMayNotDefineNonParameterFieldID) { + extends SyntaxMsg(ValueClassesMayNotDefineNonParameterFieldID): def msg(using Context) = i"""Value classes may not define non-parameter field""" def explain(using Context) = "" -} class ValueClassesMayNotDefineASecondaryConstructor(valueClass: Symbol, constructor: Symbol)(using Context) - extends SyntaxMsg(ValueClassesMayNotDefineASecondaryConstructorID) { + extends SyntaxMsg(ValueClassesMayNotDefineASecondaryConstructorID): def msg(using Context) = i"""Value classes may not define a secondary constructor""" def explain(using Context) = "" -} class ValueClassesMayNotContainInitalization(valueClass: Symbol)(using Context) - extends SyntaxMsg(ValueClassesMayNotContainInitalizationID) { + extends SyntaxMsg(ValueClassesMayNotContainInitalizationID): def msg(using Context) = i"""Value classes may not contain initialization statements""" def explain(using Context) = "" -} class ValueClassesMayNotBeAbstract(valueClass: Symbol)(using Context) - extends SyntaxMsg(ValueClassesMayNotBeAbstractID) { + extends SyntaxMsg(ValueClassesMayNotBeAbstractID): def msg(using Context) = i"""Value classes may not be 
${hl("abstract")}""" def explain(using Context) = "" -} class ValueClassesMayNotBeContainted(valueClass: Symbol)(using Context) - extends SyntaxMsg(ValueClassesMayNotBeContaintedID) { + extends SyntaxMsg(ValueClassesMayNotBeContaintedID): private def localOrMember = if (valueClass.owner.isTerm) "local class" else "member of another class" def msg(using Context) = s"""Value classes may not be a $localOrMember""" def explain(using Context) = "" -} class ValueClassesMayNotWrapAnotherValueClass(valueClass: Symbol)(using Context) - extends SyntaxMsg(ValueClassesMayNotWrapAnotherValueClassID) { + extends SyntaxMsg(ValueClassesMayNotWrapAnotherValueClassID): def msg(using Context) = """A value class may not wrap another user-defined value class""" def explain(using Context) = "" -} class ValueClassParameterMayNotBeAVar(valueClass: Symbol, param: Symbol)(using Context) - extends SyntaxMsg(ValueClassParameterMayNotBeAVarID) { + extends SyntaxMsg(ValueClassParameterMayNotBeAVarID): def msg(using Context) = i"""A value class parameter may not be a ${hl("var")}""" def explain(using Context) = i"""A value class must have exactly one ${hl("val")} parameter.""" -} class ValueClassNeedsOneValParam(valueClass: Symbol)(using Context) - extends SyntaxMsg(ValueClassNeedsExactlyOneValParamID) { + extends SyntaxMsg(ValueClassNeedsExactlyOneValParamID): def msg(using Context) = i"""Value class needs one ${hl("val")} parameter""" def explain(using Context) = "" -} class ValueClassParameterMayNotBeCallByName(valueClass: Symbol, param: Symbol)(using Context) - extends SyntaxMsg(ValueClassParameterMayNotBeCallByNameID) { + extends SyntaxMsg(ValueClassParameterMayNotBeCallByNameID): def msg(using Context) = s"Value class parameter `${param.name}` may not be call-by-name" def explain(using Context) = "" -} class SuperCallsNotAllowedInlineable(symbol: Symbol)(using Context) - extends SyntaxMsg(SuperCallsNotAllowedInlineableID) { + extends SyntaxMsg(SuperCallsNotAllowedInlineableID): def 
msg(using Context) = i"Super call not allowed in inlineable $symbol" def explain(using Context) = "Method inlining prohibits calling superclass methods, as it may lead to confusion about which super is being called." -} class NotAPath(tp: Type, usage: String)(using Context) extends TypeMsg(NotAPathID): def msg(using Context) = i"$tp is not a valid $usage, since it is not an immutable path" @@ -1740,53 +1627,47 @@ class NotAPath(tp: Type, usage: String)(using Context) extends TypeMsg(NotAPathI | - a selection of an immutable path with an immutable value.""" class WrongNumberOfParameters(expected: Int)(using Context) - extends SyntaxMsg(WrongNumberOfParametersID) { + extends SyntaxMsg(WrongNumberOfParametersID): def msg(using Context) = s"Wrong number of parameters, expected: $expected" def explain(using Context) = "" -} class DuplicatePrivateProtectedQualifier()(using Context) - extends SyntaxMsg(DuplicatePrivateProtectedQualifierID) { + extends SyntaxMsg(DuplicatePrivateProtectedQualifierID): def msg(using Context) = "Duplicate private/protected qualifier" def explain(using Context) = i"It is not allowed to combine `private` and `protected` modifiers even if they are qualified to different scopes" -} class ExpectedStartOfTopLevelDefinition()(using Context) - extends SyntaxMsg(ExpectedStartOfTopLevelDefinitionID) { + extends SyntaxMsg(ExpectedStartOfTopLevelDefinitionID): def msg(using Context) = "Expected start of definition" def explain(using Context) = i"You have to provide either ${hl("class")}, ${hl("trait")}, ${hl("object")}, or ${hl("enum")} definitions after qualifiers" -} class NoReturnFromInlineable(owner: Symbol)(using Context) - extends SyntaxMsg(NoReturnFromInlineableID) { + extends SyntaxMsg(NoReturnFromInlineableID): def msg(using Context) = i"No explicit ${hl("return")} allowed from inlineable $owner" def explain(using Context) = i"""Methods marked with ${hl("inline")} modifier may not use ${hl("return")} statements. 
|Instead, you should rely on the last expression's value being |returned from a method. |""" -} class ReturnOutsideMethodDefinition(owner: Symbol)(using Context) - extends SyntaxMsg(ReturnOutsideMethodDefinitionID) { + extends SyntaxMsg(ReturnOutsideMethodDefinitionID): def msg(using Context) = i"${hl("return")} outside method definition" def explain(using Context) = i"""You used ${hl("return")} in ${owner}. |${hl("return")} is a keyword and may only be used within method declarations. |""" -} class ExtendFinalClass(clazz:Symbol, finalClazz: Symbol)(using Context) - extends SyntaxMsg(ExtendFinalClassID) { + extends SyntaxMsg(ExtendFinalClassID): def msg(using Context) = i"$clazz cannot extend ${hl("final")} $finalClazz" def explain(using Context) = i"""A class marked with the ${hl("final")} keyword cannot be extended""" -} class ExpectedTypeBoundOrEquals(found: Token)(using Context) - extends SyntaxMsg(ExpectedTypeBoundOrEqualsID) { + extends SyntaxMsg(ExpectedTypeBoundOrEqualsID): def msg(using Context) = i"${hl("=")}, ${hl(">:")}, or ${hl("<:")} expected, but ${Tokens.showToken(found)} found" def explain(using Context) = @@ -1800,10 +1681,9 @@ class ExpectedTypeBoundOrEquals(found: Token)(using Context) |An upper type bound ${hl("T <: A")} declares that type variable ${hl("T")} |refers to a subtype of type ${hl("A")}. 
|""" -} class ClassAndCompanionNameClash(cls: Symbol, other: Symbol)(using Context) - extends NamingMsg(ClassAndCompanionNameClashID) { + extends NamingMsg(ClassAndCompanionNameClashID): def msg(using Context) = val name = cls.name.stripModuleClassSuffix i"Name clash: both ${cls.owner} and its companion object defines $name" @@ -1811,11 +1691,10 @@ class ClassAndCompanionNameClash(cls: Symbol, other: Symbol)(using Context) i"""|A ${cls.kindString} and its companion object cannot both define a ${hl("class")}, ${hl("trait")} or ${hl("object")} with the same name: | - ${cls.owner} defines ${cls} | - ${other.owner} defines ${other}""" -} class TailrecNotApplicable(symbol: Symbol)(using Context) - extends SyntaxMsg(TailrecNotApplicableID) { - def msg(using Context) = { + extends SyntaxMsg(TailrecNotApplicableID): + def msg(using Context) = val reason = if !symbol.is(Method) then i"$symbol isn't a method" else if symbol.is(Deferred) then i"$symbol is abstract" @@ -1823,12 +1702,10 @@ class TailrecNotApplicable(symbol: Symbol)(using Context) else i"$symbol contains no recursive calls" s"TailRec optimisation not applicable, $reason" - } def explain(using Context) = "" -} class FailureToEliminateExistential(tp: Type, tp1: Type, tp2: Type, boundSyms: List[Symbol], classRoot: Symbol)(using Context) - extends Message(FailureToEliminateExistentialID) { + extends Message(FailureToEliminateExistentialID): def kind = MessageKind.Compatibility def msg(using Context) = val originalType = ctx.printer.dclsText(boundSyms, "; ").show @@ -1844,20 +1721,18 @@ class FailureToEliminateExistential(tp: Type, tp1: Type, tp2: Type, boundSyms: L |Scala-3 does applications of class types to wildcard type arguments. 
|Other forms of existential types that come from Scala-2 classfiles |are only approximated in a best-effort way.""" -} class OnlyFunctionsCanBeFollowedByUnderscore(tp: Type)(using Context) - extends SyntaxMsg(OnlyFunctionsCanBeFollowedByUnderscoreID) { + extends SyntaxMsg(OnlyFunctionsCanBeFollowedByUnderscoreID): def msg(using Context) = i"Only function types can be followed by ${hl("_")} but the current expression has type $tp" def explain(using Context) = i"""The syntax ${hl("x _")} is no longer supported if ${hl("x")} is not a function. |To convert to a function value, you need to explicitly write ${hl("() => x")}""" -} class MissingEmptyArgumentList(method: String)(using Context) - extends SyntaxMsg(MissingEmptyArgumentListID) { + extends SyntaxMsg(MissingEmptyArgumentListID): def msg(using Context) = i"$method must be called with ${hl("()")} argument" - def explain(using Context) = { + def explain(using Context) = val codeExample = """def next(): T = ... |next // is expanded to next()""" @@ -1868,22 +1743,18 @@ class MissingEmptyArgumentList(method: String)(using Context) | |In Dotty, this idiom is an error. The application syntax has to follow exactly the parameter syntax. |Excluded from this rule are methods that are defined in Java or that override methods defined in Java.""" - } -} class DuplicateNamedTypeParameter(name: Name)(using Context) - extends SyntaxMsg(DuplicateNamedTypeParameterID) { + extends SyntaxMsg(DuplicateNamedTypeParameterID): def msg(using Context) = i"Type parameter $name was defined multiple times." def explain(using Context) = "" -} class UndefinedNamedTypeParameter(undefinedName: Name, definedNames: List[Name])(using Context) - extends SyntaxMsg(UndefinedNamedTypeParameterID) { + extends SyntaxMsg(UndefinedNamedTypeParameterID): def msg(using Context) = i"Type parameter $undefinedName is undefined. Expected one of ${definedNames.map(_.show).mkString(", ")}." 
def explain(using Context) = "" -} -class IllegalStartOfStatement(what: String, isModifier: Boolean, isStat: Boolean)(using Context) extends SyntaxMsg(IllegalStartOfStatementID) { +class IllegalStartOfStatement(what: String, isModifier: Boolean, isStat: Boolean)(using Context) extends SyntaxMsg(IllegalStartOfStatementID): def msg(using Context) = if isStat then "this kind of statement is not allowed here" @@ -1893,11 +1764,10 @@ class IllegalStartOfStatement(what: String, isModifier: Boolean, isStat: Boolean def explain(using Context) = i"""A statement is an import or export, a definition or an expression. |Some statements are only allowed in certain contexts""" -} -class TraitIsExpected(symbol: Symbol)(using Context) extends SyntaxMsg(TraitIsExpectedID) { +class TraitIsExpected(symbol: Symbol)(using Context) extends SyntaxMsg(TraitIsExpectedID): def msg(using Context) = i"$symbol is not a trait" - def explain(using Context) = { + def explain(using Context) = val errorCodeExample = """class A |class B @@ -1919,13 +1789,10 @@ class TraitIsExpected(symbol: Symbol)(using Context) extends SyntaxMsg(TraitIsEx | |$codeExample |""" - } -} -class TraitRedefinedFinalMethodFromAnyRef(method: Symbol)(using Context) extends SyntaxMsg(TraitRedefinedFinalMethodFromAnyRefID) { +class TraitRedefinedFinalMethodFromAnyRef(method: Symbol)(using Context) extends SyntaxMsg(TraitRedefinedFinalMethodFromAnyRefID): def msg(using Context) = i"Traits cannot redefine final $method from ${hl("class AnyRef")}." 
def explain(using Context) = "" -} class AlreadyDefined(name: Name, owner: Symbol, conflicting: Symbol)(using Context) extends NamingMsg(AlreadyDefinedID): @@ -1945,7 +1812,7 @@ extends NamingMsg(AlreadyDefinedID): i"$name is already defined as $conflicting$where$note" def explain(using Context) = "" -class PackageNameAlreadyDefined(pkg: Symbol)(using Context) extends NamingMsg(PackageNameAlreadyDefinedID) { +class PackageNameAlreadyDefined(pkg: Symbol)(using Context) extends NamingMsg(PackageNameAlreadyDefinedID): def msg(using Context) = def where = if pkg.associatedFile == null then "" else s" in ${pkg.associatedFile}" i"""${pkg.name} is the name of $pkg$where. @@ -1954,10 +1821,9 @@ class PackageNameAlreadyDefined(pkg: Symbol)(using Context) extends NamingMsg(Pa def or = if pkg.associatedFile == null then "" else " or delete the containing class file" i"""An ${hl("object")} or other toplevel definition cannot have the same name as an existing ${hl("package")}. |Rename either one of them$or.""" -} class UnapplyInvalidNumberOfArguments(qual: untpd.Tree, argTypes: List[Type])(using Context) - extends SyntaxMsg(UnapplyInvalidNumberOfArgumentsID) { + extends SyntaxMsg(UnapplyInvalidNumberOfArgumentsID): def msg(using Context) = i"Wrong number of argument patterns for $qual; expected: ($argTypes%, %)" def explain(using Context) = i"""The Unapply method of $qual was used with incorrect number of arguments. @@ -1966,10 +1832,9 @@ class UnapplyInvalidNumberOfArguments(qual: untpd.Tree, argTypes: List[Type])(us | |where subsequent arguments would have following types: ($argTypes%, %). 
|""" -} class UnapplyInvalidReturnType(unapplyResult: Type, unapplyName: Name)(using Context) - extends DeclarationMsg(UnapplyInvalidReturnTypeID) { + extends DeclarationMsg(UnapplyInvalidReturnTypeID): def msg(using Context) = val addendum = if Feature.migrateTo3 && unapplyName == nme.unapplySeq @@ -2025,17 +1890,15 @@ class UnapplyInvalidReturnType(unapplyResult: Type, unapplyName: Name)(using Con | } |} """ -} -class StaticFieldsOnlyAllowedInObjects(member: Symbol)(using Context) extends SyntaxMsg(StaticFieldsOnlyAllowedInObjectsID) { +class StaticFieldsOnlyAllowedInObjects(member: Symbol)(using Context) extends SyntaxMsg(StaticFieldsOnlyAllowedInObjectsID): def msg(using Context) = i"${hl("@static")} $member in ${member.owner} must be defined inside a static ${hl("object")}." def explain(using Context) = i"${hl("@static")} members are only allowed inside objects." -} -class StaticFieldsShouldPrecedeNonStatic(member: Symbol, defns: List[tpd.Tree])(using Context) extends SyntaxMsg(StaticFieldsShouldPrecedeNonStaticID) { +class StaticFieldsShouldPrecedeNonStatic(member: Symbol, defns: List[tpd.Tree])(using Context) extends SyntaxMsg(StaticFieldsShouldPrecedeNonStaticID): def msg(using Context) = i"${hl("@static")} $member in ${member.owner} must be defined before non-static fields." - def explain(using Context) = { + def explain(using Context) = val nonStatics = defns.takeWhile(_.symbol != member).take(3).filter(_.isInstanceOf[tpd.ValDef]) val codeExample = s"""object ${member.owner.name.firstPart} { | @static ${member} = ... 
@@ -2050,12 +1913,10 @@ class StaticFieldsShouldPrecedeNonStatic(member: Symbol, defns: List[tpd.Tree])( |The definition of ${member.name} should have been before the non ${hl("@static val")}s: |$codeExample |""" - } -} -class CyclicInheritance(symbol: Symbol, addendum: => String)(using Context) extends SyntaxMsg(CyclicInheritanceID) { +class CyclicInheritance(symbol: Symbol, addendum: => String)(using Context) extends SyntaxMsg(CyclicInheritanceID): def msg(using Context) = i"Cyclic inheritance: $symbol extends itself$addendum" - def explain(using Context) = { + def explain(using Context) = val codeExample = "class A extends A" i"""Cyclic inheritance is prohibited in Dotty. @@ -2066,12 +1927,10 @@ class CyclicInheritance(symbol: Symbol, addendum: => String)(using Context) exte |The example mentioned above would fail because this type of inheritance hierarchy |creates a "cycle" where a not yet defined class A extends itself which makes |impossible to instantiate an object of this class""" - } -} class BadSymbolicReference(denot: SymDenotation)(using Context) -extends ReferenceMsg(BadSymbolicReferenceID) { - def msg(using Context) = { +extends ReferenceMsg(BadSymbolicReferenceID): + def msg(using Context) = val denotationOwner = denot.owner val denotationName = ctx.fresh.setSetting(ctx.settings.YdebugNames, true).printer.nameString(denot.name) val file = denot.symbol.associatedFile @@ -2083,41 +1942,36 @@ extends ReferenceMsg(BadSymbolicReferenceID) { |refers to $denotationName in ${denotationOwner.showKind} ${denotationOwner.showFullName} which is not available. 
|It may be completely missing from the current classpath, or the version on |the classpath might be incompatible with the version used when compiling $src.""" - } def explain(using Context) = "" -} -class UnableToExtendSealedClass(pclazz: Symbol)(using Context) extends SyntaxMsg(UnableToExtendSealedClassID) { +class UnableToExtendSealedClass(pclazz: Symbol)(using Context) extends SyntaxMsg(UnableToExtendSealedClassID): def msg(using Context) = i"Cannot extend ${hl("sealed")} $pclazz in a different source file" def explain(using Context) = "A sealed class or trait can only be extended in the same file as its declaration" -} class SymbolHasUnparsableVersionNumber(symbol: Symbol, errorMessage: String)(using Context) -extends SyntaxMsg(SymbolHasUnparsableVersionNumberID) { +extends SyntaxMsg(SymbolHasUnparsableVersionNumberID): def msg(using Context) = i"${symbol.showLocated} has an unparsable version number: $errorMessage" def explain(using Context) = i"""The ${symbol.showLocated} is marked with ${hl("@migration")} indicating it has changed semantics |between versions and the ${hl("-Xmigration")} settings is used to warn about constructs |whose behavior may have changed since version change.""" -} class SymbolChangedSemanticsInVersion( symbol: Symbol, migrationVersion: ScalaVersion, migrationMessage: String -)(using Context) extends SyntaxMsg(SymbolChangedSemanticsInVersionID) { +)(using Context) extends SyntaxMsg(SymbolChangedSemanticsInVersionID): def msg(using Context) = i"${symbol.showLocated} has changed semantics in version $migrationVersion: $migrationMessage" def explain(using Context) = i"""The ${symbol.showLocated} is marked with ${hl("@migration")} indicating it has changed semantics |between versions and the ${hl("-Xmigration")} settings is used to warn about constructs |whose behavior may have changed since version change.""" -} class UnableToEmitSwitch()(using Context) -extends SyntaxMsg(UnableToEmitSwitchID) { +extends SyntaxMsg(UnableToEmitSwitchID): 
def msg(using Context) = i"Could not emit switch for ${hl("@switch")} annotated match" - def explain(using Context) = { + def explain(using Context) = val codeExample = """val ConstantB = 'B' |final val ConstantC = 'C' @@ -2139,51 +1993,45 @@ extends SyntaxMsg(UnableToEmitSwitchID) { |- the matched value is not of type ${hl("Int")}, ${hl("Byte")}, ${hl("Short")} or ${hl("Char")} |- the matched value is not a constant literal |- there are less than three cases""" - } -} class MissingCompanionForStatic(member: Symbol)(using Context) -extends SyntaxMsg(MissingCompanionForStaticID) { +extends SyntaxMsg(MissingCompanionForStaticID): def msg(using Context) = i"${member.owner} does not have a companion class" def explain(using Context) = i"An object that contains ${hl("@static")} members must have a companion class." -} class PolymorphicMethodMissingTypeInParent(rsym: Symbol, parentSym: Symbol)(using Context) -extends SyntaxMsg(PolymorphicMethodMissingTypeInParentID) { +extends SyntaxMsg(PolymorphicMethodMissingTypeInParentID): def msg(using Context) = i"Polymorphic refinement $rsym without matching type in parent $parentSym is no longer allowed" def explain(using Context) = i"""Polymorphic $rsym is not allowed in the structural refinement of $parentSym because |$rsym does not override any method in $parentSym. 
Structural refinement does not allow for |polymorphic methods.""" -} class ParamsNoInline(owner: Symbol)(using Context) - extends SyntaxMsg(ParamsNoInlineID) { + extends SyntaxMsg(ParamsNoInlineID): def msg(using Context) = i"""${hl("inline")} modifier can only be used for parameters of inline methods""" def explain(using Context) = "" -} -class JavaSymbolIsNotAValue(symbol: Symbol)(using Context) extends TypeMsg(JavaSymbolIsNotAValueID) { +class JavaSymbolIsNotAValue(symbol: Symbol)(using Context) extends TypeMsg(JavaSymbolIsNotAValueID): def msg(using Context) = val kind = if symbol is Package then i"$symbol" else i"Java defined ${hl("class " + symbol.name)}" s"$kind is not a value" def explain(using Context) = "" -} class DoubleDefinition(decl: Symbol, previousDecl: Symbol, base: Symbol)(using Context) -extends NamingMsg(DoubleDefinitionID) { - def msg(using Context) = { +extends NamingMsg(DoubleDefinitionID): + def msg(using Context) = def nameAnd = if (decl.name != previousDecl.name) " name and" else "" def erasedType = if ctx.erasedTypes then i" ${decl.info}" else "" def details(using Context): String = - if (decl.isRealMethod && previousDecl.isRealMethod) { + if (decl.isRealMethod && previousDecl.isRealMethod) import Signature.MatchDegree._ // compare the signatures when both symbols represent methods - decl.signature.matchDegree(previousDecl.signature) match { + decl.signature.matchDegree(previousDecl.signature) match case NoMatch => // If the signatures don't match at all at the current phase, then // they might match after erasure. 
@@ -2206,16 +2054,13 @@ extends NamingMsg(DoubleDefinitionID) { |for disambiguation.""" else "" i"have the same$nameAnd type$erasedType after erasure.$hint" - } - } else "" - def symLocation(sym: Symbol) = { + def symLocation(sym: Symbol) = val lineDesc = if (sym.span.exists && sym.span != sym.owner.span) s" at line ${sym.srcPos.line + 1}" else "" i"in ${sym.owner}${lineDesc}" - } val clashDescription = if (decl.owner eq previousDecl.owner) "Double definition" @@ -2230,37 +2075,31 @@ extends NamingMsg(DoubleDefinitionID) { |${decl.showDcl} ${symLocation(decl)} |""" } + details - } def explain(using Context) = "" -} -class ImportRenamedTwice(ident: untpd.Ident)(using Context) extends SyntaxMsg(ImportRenamedTwiceID) { +class ImportRenamedTwice(ident: untpd.Ident)(using Context) extends SyntaxMsg(ImportRenamedTwiceID): def msg(using Context) = s"${ident.show} is renamed twice on the same import line." def explain(using Context) = "" -} -class TypeTestAlwaysDiverges(scrutTp: Type, testTp: Type)(using Context) extends SyntaxMsg(TypeTestAlwaysDivergesID) { +class TypeTestAlwaysDiverges(scrutTp: Type, testTp: Type)(using Context) extends SyntaxMsg(TypeTestAlwaysDivergesID): def msg(using Context) = s"This type test will never return a result since the scrutinee type ${scrutTp.show} does not contain any value." def explain(using Context) = "" -} // Relative of CyclicReferenceInvolvingImplicit and RecursiveValueNeedsResultType class TermMemberNeedsResultTypeForImplicitSearch(cycleSym: Symbol)(using Context) - extends CyclicMsg(TermMemberNeedsNeedsResultTypeForImplicitSearchID) { + extends CyclicMsg(TermMemberNeedsNeedsResultTypeForImplicitSearchID): def msg(using Context) = i"""$cycleSym needs result type because its right-hand side attempts implicit search""" def explain(using Context) = i"""|The right hand-side of $cycleSym's definition requires an implicit search at the highlighted position. |To avoid this error, give `$cycleSym` an explicit type. 
|""" -} -class ClassCannotExtendEnum(cls: Symbol, parent: Symbol)(using Context) extends SyntaxMsg(ClassCannotExtendEnumID) { +class ClassCannotExtendEnum(cls: Symbol, parent: Symbol)(using Context) extends SyntaxMsg(ClassCannotExtendEnumID): def msg(using Context) = i"""$cls in ${cls.owner} extends enum ${parent.name}, but extending enums is prohibited.""" def explain(using Context) = "" -} -class NotAnExtractor(tree: untpd.Tree)(using Context) extends SyntaxMsg(NotAnExtractorID) { +class NotAnExtractor(tree: untpd.Tree)(using Context) extends SyntaxMsg(NotAnExtractorID): def msg(using Context) = i"$tree cannot be used as an extractor in a pattern because it lacks an unapply or unapplySeq method" def explain(using Context) = i"""|An ${hl("unapply")} method should be defined in an ${hl("object")} as follow: @@ -2271,77 +2110,67 @@ class NotAnExtractor(tree: untpd.Tree)(using Context) extends SyntaxMsg(NotAnExt |Sometimes, the number of sub-values isn't fixed and we would like to return a sequence. |For this reason, you can also define patterns through ${hl("unapplySeq")} which returns ${hl("Option[Seq[T]]")}. 
|This mechanism is used for instance in pattern ${hl("case List(x1, ..., xn)")}""" -} class MemberWithSameNameAsStatic()(using Context) - extends SyntaxMsg(MemberWithSameNameAsStaticID) { + extends SyntaxMsg(MemberWithSameNameAsStaticID): def msg(using Context) = i"Companion classes cannot define members with same name as a ${hl("@static")} member" def explain(using Context) = "" -} class PureExpressionInStatementPosition(stat: untpd.Tree, val exprOwner: Symbol)(using Context) - extends Message(PureExpressionInStatementPositionID) { + extends Message(PureExpressionInStatementPositionID): def kind = MessageKind.PotentialIssue def msg(using Context) = "A pure expression does nothing in statement position; you may be omitting necessary parentheses" def explain(using Context) = i"""The pure expression $stat doesn't have any side effect and its result is not assigned elsewhere. |It can be removed without changing the semantics of the program. This may indicate an error.""" -} class UnqualifiedCallToAnyRefMethod(stat: untpd.Tree, method: Symbol)(using Context) - extends Message(UnqualifiedCallToAnyRefMethodID) { + extends Message(UnqualifiedCallToAnyRefMethodID): def kind = MessageKind.PotentialIssue def msg(using Context) = i"Suspicious top-level unqualified call to ${hl(method.name.toString)}" def explain(using Context) = i"""Top-level unqualified calls to ${hl("AnyRef")} or ${hl("Any")} methods such as ${hl(method.name.toString)} are |resolved to calls on ${hl("Predef")} or on imported methods. 
This might not be what |you intended.""" -} class TraitCompanionWithMutableStatic()(using Context) - extends SyntaxMsg(TraitCompanionWithMutableStaticID) { + extends SyntaxMsg(TraitCompanionWithMutableStaticID): def msg(using Context) = i"Companion of traits cannot define mutable @static fields" def explain(using Context) = "" -} class LazyStaticField()(using Context) - extends SyntaxMsg(LazyStaticFieldID) { + extends SyntaxMsg(LazyStaticFieldID): def msg(using Context) = i"Lazy @static fields are not supported" def explain(using Context) = "" -} class StaticOverridingNonStaticMembers()(using Context) - extends SyntaxMsg(StaticOverridingNonStaticMembersID) { + extends SyntaxMsg(StaticOverridingNonStaticMembersID): def msg(using Context) = i"${hl("@static")} members cannot override or implement non-static ones" def explain(using Context) = "" -} class OverloadInRefinement(rsym: Symbol)(using Context) - extends DeclarationMsg(OverloadInRefinementID) { + extends DeclarationMsg(OverloadInRefinementID): def msg(using Context) = "Refinements cannot introduce overloaded definitions" def explain(using Context) = i"""The refinement `$rsym` introduces an overloaded definition. 
|Refinements cannot contain overloaded definitions.""" -} class NoMatchingOverload(val alternatives: List[SingleDenotation], pt: Type)(using Context) - extends TypeMsg(NoMatchingOverloadID) { + extends TypeMsg(NoMatchingOverloadID): def msg(using Context) = i"""None of the ${err.overloadedAltsStr(alternatives)} |match ${err.expectedTypeStr(pt)}""" def explain(using Context) = "" -} class StableIdentPattern(tree: untpd.Tree, pt: Type)(using Context) - extends TypeMsg(StableIdentPatternID) { + extends TypeMsg(StableIdentPatternID): def msg(using Context) = i"""Stable identifier required, but $tree found""" def explain(using Context) = "" -} class IllegalSuperAccessor(base: Symbol, memberName: Name, targetName: Name, acc: Symbol, accTp: Type, - other: Symbol, otherTp: Type)(using Context) extends DeclarationMsg(IllegalSuperAccessorID) { - def msg(using Context) = { + other: Symbol, otherTp: Type)(using Context) extends DeclarationMsg(IllegalSuperAccessorID): + def msg(using Context) = // The mixin containing a super-call that requires a super-accessor val accMixin = acc.owner // The class or trait that the super-accessor should resolve too in `base` @@ -2352,18 +2181,16 @@ class IllegalSuperAccessor(base: Symbol, memberName: Name, targetName: Name, val resolvedSuperCall = hl(i"super[${otherMixin.name}].$memberName") // The super-call that we would have called if `super` in traits behaved like it // does in classes, i.e. followed the linearization of the trait itself. 
- val staticSuperCall = { + val staticSuperCall = val staticSuper = accMixin.asClass.info.parents.reverse .find(_.nonPrivateMember(memberName) .matchingDenotation(accMixin.thisType, acc.info, targetName).exists) - val staticSuperName = staticSuper match { + val staticSuperName = staticSuper match case Some(parent) => parent.classSymbol.name.show case None => // Might be reachable under separate compilation "SomeParent" - } hl(i"super[$staticSuperName].$memberName") - } i"""$base cannot be defined due to a conflict between its parents when |implementing a super-accessor for $memberName in $accMixin: | @@ -2390,12 +2217,10 @@ class IllegalSuperAccessor(base: Symbol, memberName: Name, targetName: Name, |2. Alternatively, replace $superCall in the body of $accMixin by a | super-call to a specific parent, e.g. $staticSuperCall |""" - } def explain(using Context) = "" -} class TraitParameterUsedAsParentPrefix(cls: Symbol)(using Context) - extends DeclarationMsg(TraitParameterUsedAsParentPrefixID) { + extends DeclarationMsg(TraitParameterUsedAsParentPrefixID): def msg(using Context) = s"${cls.show} cannot extend from a parent that is derived via its own parameters" def explain(using Context) = @@ -2407,10 +2232,9 @@ class TraitParameterUsedAsParentPrefix(cls: Symbol)(using Context) |In order to fix this issue consider directly extending from the parent rather |than obtaining it from the parameters of ${cls.show}. |""" -} class UnknownNamedEnclosingClassOrObject(name: TypeName)(using Context) - extends ReferenceMsg(UnknownNamedEnclosingClassOrObjectID) { + extends ReferenceMsg(UnknownNamedEnclosingClassOrObjectID): def msg(using Context) = i"""no enclosing class or object is named '${hl(name.show)}'""" def explain(using Context) = @@ -2420,26 +2244,23 @@ class UnknownNamedEnclosingClassOrObject(name: TypeName)(using Context) |'${hl(name.show)}' is not misspelled and has been imported into the |current scope. 
""" - } class IllegalCyclicTypeReference(sym: Symbol, where: String, lastChecked: Type)(using Context) - extends CyclicMsg(IllegalCyclicTypeReferenceID) { + extends CyclicMsg(IllegalCyclicTypeReferenceID): def msg(using Context) = val lastCheckedStr = try lastChecked.show catch case ex: CyclicReference => "..." i"illegal cyclic type reference: ${where} ${hl(lastCheckedStr)} of $sym refers back to the type itself" def explain(using Context) = "" -} class ErasedTypesCanOnlyBeFunctionTypes()(using Context) - extends SyntaxMsg(ErasedTypesCanOnlyBeFunctionTypesID) { + extends SyntaxMsg(ErasedTypesCanOnlyBeFunctionTypesID): def msg(using Context) = "Types with erased keyword can only be function types `(erased ...) => ...`" def explain(using Context) = "" -} class CaseClassMissingNonImplicitParamList(cdef: untpd.TypeDef)(using Context) - extends SyntaxMsg(CaseClassMissingNonImplicitParamListID) { + extends SyntaxMsg(CaseClassMissingNonImplicitParamListID): def msg(using Context) = i"""|A ${hl("case class")} must have at least one leading non-implicit parameter list""" @@ -2447,10 +2268,9 @@ class CaseClassMissingNonImplicitParamList(cdef: untpd.TypeDef)(using Context) i"""|${cdef.name} must have at least one leading non-implicit parameter list, | if you're aiming to have a case class parametrized only by implicit ones, you should | add an explicit ${hl("()")} as the first parameter list to ${cdef.name}.""" -} class EnumerationsShouldNotBeEmpty(cdef: untpd.TypeDef)(using Context) - extends SyntaxMsg(EnumerationsShouldNotBeEmptyID) { + extends SyntaxMsg(EnumerationsShouldNotBeEmptyID): def msg(using Context) = "Enumerations must contain at least one case" def explain(using Context) = @@ -2460,10 +2280,9 @@ class EnumerationsShouldNotBeEmpty(cdef: untpd.TypeDef)(using Context) | ${hl("case")} Option1, Option2 | } |""" -} class TypedCaseDoesNotExplicitlyExtendTypedEnum(enumDef: Symbol, caseDef: untpd.TypeDef)(using Context) - extends 
SyntaxMsg(TypedCaseDoesNotExplicitlyExtendTypedEnumID) { + extends SyntaxMsg(TypedCaseDoesNotExplicitlyExtendTypedEnumID): def msg(using Context) = i"explicit extends clause needed because both enum case and enum class have type parameters" def explain(using Context) = @@ -2474,29 +2293,26 @@ class TypedCaseDoesNotExplicitlyExtendTypedEnum(enumDef: Symbol, caseDef: untpd. | ${hl("case")} ${caseDef.name}[U](u: U) ${hl("extends")} ${enumDef.name}[U] | } |""" -} class IllegalRedefinitionOfStandardKind(kindType: String, name: Name)(using Context) - extends SyntaxMsg(IllegalRedefinitionOfStandardKindID) { + extends SyntaxMsg(IllegalRedefinitionOfStandardKindID): def msg(using Context) = i"illegal redefinition of standard $kindType $name" def explain(using Context) = i"""| "$name" is a standard Scala core `$kindType` | Please choose a different name to avoid conflicts |""" -} class NoExtensionMethodAllowed(mdef: untpd.DefDef)(using Context) - extends SyntaxMsg(NoExtensionMethodAllowedID) { + extends SyntaxMsg(NoExtensionMethodAllowedID): def msg(using Context) = i"No extension method allowed here, since collective parameters are given" def explain(using Context) = i"""|Extension method: | `${mdef}` |is defined inside an extension clause which has collective parameters. |""" -} class ExtensionMethodCannotHaveTypeParams(mdef: untpd.DefDef)(using Context) - extends SyntaxMsg(ExtensionMethodCannotHaveTypeParamsID) { + extends SyntaxMsg(ExtensionMethodCannotHaveTypeParamsID): def msg(using Context) = i"Extension method cannot have type parameters since some were already given previously" def explain(using Context) = @@ -2505,19 +2321,17 @@ class ExtensionMethodCannotHaveTypeParams(mdef: untpd.DefDef)(using Context) |has type parameters `[${mdef.leadingTypeParams.map(_.show).mkString(",")}]`, while the extension clause has |it's own type parameters. Please consider moving these to the extension clause's type parameter list. 
|""" -} class ExtensionCanOnlyHaveDefs(mdef: untpd.Tree)(using Context) - extends SyntaxMsg(ExtensionCanOnlyHaveDefsID) { + extends SyntaxMsg(ExtensionCanOnlyHaveDefsID): def msg(using Context) = i"Only methods allowed here, since collective parameters are given" def explain(using Context) = i"""Extension clauses can only have `def`s | `${mdef.show}` is not a valid expression here. |""" -} class UnexpectedPatternForSummonFrom(tree: Tree[_])(using Context) - extends SyntaxMsg(UnexpectedPatternForSummonFromID) { + extends SyntaxMsg(UnexpectedPatternForSummonFromID): def msg(using Context) = i"Unexpected pattern for summonFrom. Expected ${hl("`x: T`")} or ${hl("`_`")}" def explain(using Context) = i"""|The pattern "${tree.show}" provided in the ${hl("case")} expression of the ${hl("summonFrom")}, @@ -2533,39 +2347,34 @@ class UnexpectedPatternForSummonFrom(tree: Tree[_])(using Context) | case _ => ??? | } |""" -} class AnonymousInstanceCannotBeEmpty(impl: untpd.Template)(using Context) - extends SyntaxMsg(AnonymousInstanceCannotBeEmptyID) { + extends SyntaxMsg(AnonymousInstanceCannotBeEmptyID): def msg(using Context) = i"anonymous instance must implement a type or have at least one extension method" def explain(using Context) = i"""|Anonymous instances cannot be defined with an empty body. The block |`${impl.show}` should either contain an implemented type or at least one extension method. 
|""" -} class ModifierNotAllowedForDefinition(flag: Flag)(using Context) - extends SyntaxMsg(ModifierNotAllowedForDefinitionID) { + extends SyntaxMsg(ModifierNotAllowedForDefinitionID): def msg(using Context) = i"Modifier ${hl(flag.flagsString)} is not allowed for this definition" def explain(using Context) = "" -} class RedundantModifier(flag: Flag)(using Context) - extends SyntaxMsg(RedundantModifierID) { + extends SyntaxMsg(RedundantModifierID): def msg(using Context) = i"Modifier ${hl(flag.flagsString)} is redundant for this definition" def explain(using Context) = "" -} class InvalidReferenceInImplicitNotFoundAnnotation(typeVar: String, owner: String)(using Context) - extends ReferenceMsg(InvalidReferenceInImplicitNotFoundAnnotationID) { + extends ReferenceMsg(InvalidReferenceInImplicitNotFoundAnnotationID): def msg(using Context) = i"""|Invalid reference to a type variable ${hl(typeVar)} found in the annotation argument. |The variable does not occur as a parameter in the scope of ${hl(owner)}. |""" def explain(using Context) = "" -} class CaseClassInInlinedCode(tree: tpd.Tree)(using Context) - extends SyntaxMsg(CaseClassInInlinedCodeID) { + extends SyntaxMsg(CaseClassInInlinedCodeID): def defKind = if tree.symbol.is(Module) then "object" else "class" def msg(using Context) = s"Case $defKind definitions are not allowed in inline methods or quoted code. Use a normal $defKind instead." @@ -2573,7 +2382,6 @@ class CaseClassInInlinedCode(tree: tpd.Tree)(using Context) i"""Case class/object definitions generate a considerable footprint in code size. |Inlining such definition would multiply this footprint for each call site. 
|""" -} class ImplicitSearchTooLargeWarning(limit: Int, openSearchPairs: List[(Candidate, Type)])(using Context) extends TypeMsg(ImplicitSearchTooLargeID): @@ -2733,10 +2541,9 @@ class MissingImplicitArgument( filter(userDefinedImplicitNotFoundParamMessage) .orElse(filter(userDefinedImplicitNotFoundTypeMessage)) - object AmbiguousImplicitMsg { + object AmbiguousImplicitMsg: def unapply(search: SearchSuccess): Option[String] = userDefinedMsg(search.ref.symbol, defn.ImplicitAmbiguousAnnot) - } def msg(using Context): String = @@ -2772,11 +2579,10 @@ class MissingImplicitArgument( /** Construct a custom error message given an ambiguous implicit * candidate `alt` and a user defined message `raw`. */ - def userDefinedAmbiguousImplicitMsg(alt: SearchSuccess, raw: String) = { - val params = alt.ref.underlying match { + def userDefinedAmbiguousImplicitMsg(alt: SearchSuccess, raw: String) = + val params = alt.ref.underlying match case p: PolyType => p.paramNames.map(_.toString) case _ => Nil - } def resolveTypes(targs: List[tpd.Tree])(using Context) = targs.map(a => Inferencing.fullyDefinedType(a.tpe, "type argument", a.srcPos)) @@ -2795,7 +2601,6 @@ class MissingImplicitArgument( val targs = tpd.typeArgss(call).flatten val args = resolveTypes(targs)(using ctx.fresh.setTyperState(alt.tstate)) userDefinedErrorString(raw, params, args) - } /** Extracting the message from a type, e.g. 
in * diff --git a/compiler/src/dotty/tools/dotc/reporting/trace.scala b/compiler/src/dotty/tools/dotc/reporting/trace.scala index 8e8d3efb8b40..89fccac46d0d 100644 --- a/compiler/src/dotty/tools/dotc/reporting/trace.scala +++ b/compiler/src/dotty/tools/dotc/reporting/trace.scala @@ -60,10 +60,9 @@ trait TraceSyntax: t => i"$t" } else - summonFrom { + summonFrom: case given Show[T] => t => i"$t" case _ => alwaysToString - } showOp })(op) diff --git a/compiler/src/dotty/tools/dotc/rewrites/Rewrites.scala b/compiler/src/dotty/tools/dotc/rewrites/Rewrites.scala index 45cc3c4ccfe0..7fe3c9f4156c 100644 --- a/compiler/src/dotty/tools/dotc/rewrites/Rewrites.scala +++ b/compiler/src/dotty/tools/dotc/rewrites/Rewrites.scala @@ -12,14 +12,13 @@ import java.io.OutputStreamWriter import java.nio.charset.StandardCharsets.UTF_8 /** Handles rewriting of Scala2 files to Dotty */ -object Rewrites { +object Rewrites: private class PatchedFiles extends mutable.HashMap[SourceFile, Patches] - private case class Patch(span: Span, replacement: String) { + private case class Patch(span: Span, replacement: String): def delta = replacement.length - (span.end - span.start) - } - private class Patches(source: SourceFile) { + private class Patches(source: SourceFile): private[Rewrites] val pbuf = new mutable.ListBuffer[Patch]() def addPatch(span: Span, replacement: String): Unit = @@ -29,7 +28,7 @@ object Rewrites { pbuf.indices.reverse.find(i => span.contains(pbuf(i).span)).foreach(pbuf.remove) pbuf += Patch(span, replacement) - def apply(cs: Array[Char]): Array[Char] = { + def apply(cs: Array[Char]): Array[Char] = val delta = pbuf.map(_.delta).sum val patches = pbuf.toList.sortBy(p => (p.span.start, p.span.end)) if (patches.nonEmpty) @@ -38,13 +37,12 @@ object Rewrites { p2 } val ds = new Array[Char](cs.length + delta) - @tailrec def loop(ps: List[Patch], inIdx: Int, outIdx: Int): Unit = { - def copy(upTo: Int): Int = { + @tailrec def loop(ps: List[Patch], inIdx: Int, outIdx: Int): Unit = + 
def copy(upTo: Int): Int = val untouched = upTo - inIdx System.arraycopy(cs, inIdx, ds, outIdx, untouched) outIdx + untouched - } - ps match { + ps match case patch @ Patch(span, replacement) :: ps1 => val outNew = copy(span.start) replacement.copyToArray(ds, outNew) @@ -52,18 +50,14 @@ object Rewrites { case Nil => val outNew = copy(cs.length) assert(outNew == ds.length, s"$outNew != ${ds.length}") - } - } loop(patches, 0, 0) ds - } def writeBack(): Unit = val chars = apply(source.underlying.content) val osw = OutputStreamWriter(source.file.output, UTF_8) try osw.write(chars, 0, chars.length) finally osw.close() - } /** If -rewrite is set, record a patch that replaces the range * given by `span` in `source` by `replacement` @@ -96,16 +90,13 @@ object Rewrites { /** If -rewrite is set, apply all patches and overwrite patched source files. */ def writeBack()(using Context): Unit = - for (rewrites <- ctx.settings.rewrite.value; source <- rewrites.patched.keys) { + for (rewrites <- ctx.settings.rewrite.value; source <- rewrites.patched.keys) report.echo(s"[patched file ${source.file.path}]") rewrites.patched(source).writeBack() - } -} /** A completely encapsulated class representing rewrite state, used * as an optional setting. 
*/ -class Rewrites { +class Rewrites: import Rewrites._ private val patched = new PatchedFiles -} diff --git a/compiler/src/dotty/tools/dotc/sbt/APIUtils.scala b/compiler/src/dotty/tools/dotc/sbt/APIUtils.scala index aa98f79c8e3b..df54932623c0 100644 --- a/compiler/src/dotty/tools/dotc/sbt/APIUtils.scala +++ b/compiler/src/dotty/tools/dotc/sbt/APIUtils.scala @@ -16,13 +16,12 @@ import xsbti.api.SafeLazy.strict * * Mostly comes from https://github.com/sbt/zinc/blob/c46643f3e68d7d4f270bf318e3f150f5a59c0aab/internal/zinc-apiinfo/src/main/scala/xsbt/api/APIUtil.scala */ -object APIUtils { - private object Constants { +object APIUtils: + private object Constants: val PublicAccess = api.Public.create() val EmptyModifiers = new api.Modifiers(false, false, false, false, false, false, false, false) val EmptyStructure = api.Structure.of(strict(Array.empty), strict(Array.empty), strict(Array.empty)) val EmptyType = api.EmptyType.of() - } import Constants._ @@ -34,15 +33,13 @@ object APIUtils { * to be constructed, but if the class is never accessed by Scala source code, * a dummy empty class can be registered instead, using this method. 
*/ - def registerDummyClass(classSym: ClassSymbol)(using Context): Unit = { - if (ctx.sbtCallback != null) { + def registerDummyClass(classSym: ClassSymbol)(using Context): Unit = + if (ctx.sbtCallback != null) val classLike = emptyClassLike(classSym) ctx.sbtCallback.api(ctx.compilationUnit.source.file.file, classLike) - } - } // See APIUtils.emptyClassLike - private def emptyClassLike(classSym: ClassSymbol)(using Context): api.ClassLike = { + private def emptyClassLike(classSym: ClassSymbol)(using Context): api.ClassLike = val name = classSym.fullName.stripModuleClassSuffix.toString val definitionType = if (classSym.is(Trait)) api.DefinitionType.Trait @@ -51,5 +48,3 @@ object APIUtils { val topLevel = classSym.isTopLevelClass api.ClassLike.of(name, PublicAccess, EmptyModifiers, Array.empty, definitionType, strict(EmptyType), strict(EmptyStructure), Array.empty, Array.empty, topLevel, Array.empty) - } -} diff --git a/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala b/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala index f54baeb7256c..1f9bf195e2f2 100644 --- a/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala +++ b/compiler/src/dotty/tools/dotc/sbt/ExtractAPI.scala @@ -42,16 +42,15 @@ import scala.util.chaining.* * * @see ExtractDependencies */ -class ExtractAPI extends Phase { +class ExtractAPI extends Phase: override def phaseName: String = ExtractAPI.name override def description: String = ExtractAPI.description - override def isRunnable(using Context): Boolean = { + override def isRunnable(using Context): Boolean = def forceRun = ctx.settings.YdumpSbtInc.value || ctx.settings.YforceSbtPhases.value super.isRunnable && (ctx.sbtCallback != null || forceRun) - } // Check no needed. Does not transform trees override def isCheckable: Boolean = false @@ -63,7 +62,7 @@ class ExtractAPI extends Phase { // definitions, and `PostTyper` does not change definitions). 
override def runsAfter: Set[String] = Set(transform.PostTyper.name) - override def run(using Context): Unit = { + override def run(using Context): Unit = val unit = ctx.compilationUnit val sourceFile = unit.source.file if (ctx.sbtCallback != null) @@ -73,22 +72,19 @@ class ExtractAPI extends Phase { val classes = apiTraverser.apiSource(unit.tpdTree) val mainClasses = apiTraverser.mainClasses - if (ctx.settings.YdumpSbtInc.value) { + if (ctx.settings.YdumpSbtInc.value) // Append to existing file that should have been created by ExtractDependencies val pw = new PrintWriter(File(sourceFile.jpath).changeExtension("inc").toFile .bufferedWriter(append = true), true) - try { + try classes.foreach(source => pw.println(DefaultShowAPI(source))) - } finally pw.close() - } + finally pw.close() if ctx.sbtCallback != null && !ctx.compilationUnit.suspendedAtInliningPhase // already registered before this unit was suspended then classes.foreach(ctx.sbtCallback.api(sourceFile.file, _)) mainClasses.foreach(ctx.sbtCallback.mainClass(sourceFile.file, _)) - } -} object ExtractAPI: val name: String = "sbt-api" @@ -136,7 +132,7 @@ object ExtractAPI: * without going through an intermediate representation, see * http://www.scala-sbt.org/0.13/docs/Understanding-Recompilation.html#Hashing+an+API+representation */ -private class ExtractAPICollector(using Context) extends ThunkHolder { +private class ExtractAPICollector(using Context) extends ThunkHolder: import tpd._ import xsbti.api @@ -167,7 +163,7 @@ private class ExtractAPICollector(using Context) extends ThunkHolder { private val allNonLocalClassesInSrc = new mutable.HashSet[xsbti.api.ClassLike] private val _mainClasses = new mutable.HashSet[String] - private object Constants { + private object Constants: val emptyStringArray = Array[String]() val local = api.ThisQualifier.create() val public = api.Public.create() @@ -178,7 +174,6 @@ private class ExtractAPICollector(using Context) extends ThunkHolder { val emptyType = 
api.EmptyType.create() val emptyModifiers = new api.Modifiers(false, false, false, false, false,false, false, false) - } /** Some Dotty types do not have a corresponding type in xsbti.api.* that * represents them. Until this is fixed we can workaround this by using @@ -198,37 +193,34 @@ private class ExtractAPICollector(using Context) extends ThunkHolder { private val superMarker = marker("Super") /** Extract the API representation of a source file */ - def apiSource(tree: Tree): Seq[api.ClassLike] = { - def apiClasses(tree: Tree): Unit = tree match { + def apiSource(tree: Tree): Seq[api.ClassLike] = + def apiClasses(tree: Tree): Unit = tree match case PackageDef(_, stats) => stats.foreach(apiClasses) case tree: TypeDef => apiClass(tree.symbol.asClass) case _ => - } apiClasses(tree) forceThunks() allNonLocalClassesInSrc.toSeq - } def apiClass(sym: ClassSymbol): api.ClassLikeDef = classLikeCache.getOrElseUpdate(sym, computeClass(sym)) - def mainClasses: Set[String] = { + def mainClasses: Set[String] = forceThunks() _mainClasses.toSet - } - private def computeClass(sym: ClassSymbol): api.ClassLikeDef = { + private def computeClass(sym: ClassSymbol): api.ClassLikeDef = import xsbti.api.{DefinitionType => dt} val defType = if (sym.is(Trait)) dt.Trait - else if (sym.is(ModuleClass)) { + else if (sym.is(ModuleClass)) if (sym.is(PackageClass)) dt.PackageModule else dt.Module - } else dt.ClassDef + else dt.ClassDef val selfType = apiType(sym.givenSelfType) @@ -255,36 +247,32 @@ private class ExtractAPICollector(using Context) extends ThunkHolder { allNonLocalClassesInSrc += cl - if (sym.isStatic && !sym.is(Trait) && ctx.platform.hasMainMethod(sym)) { + if (sym.isStatic && !sym.is(Trait) && ctx.platform.hasMainMethod(sym)) // If sym is an object, all main methods count, otherwise only @static ones count. 
_mainClasses += name - } api.ClassLikeDef.of(name, acc, modifiers, anns, tparams, defType) - } - def apiClassStructure(csym: ClassSymbol): api.Structure = { + def apiClassStructure(csym: ClassSymbol): api.Structure = val cinfo = csym.classInfo - val bases = { + val bases = val ancestorTypes0 = try linearizedAncestorTypes(cinfo) - catch { + catch case ex: TypeError => // See neg/i1750a for an example where a cyclic error can arise. // The root cause in this example is an illegal "override" of an inner trait report.error(ex, csym.sourcePos) defn.ObjectType :: Nil - } - if (ValueClasses.isDerivedValueClass(csym)) { + if (ValueClasses.isDerivedValueClass(csym)) val underlying = ValueClasses.valueClassUnbox(csym).info.finalResultType // The underlying type of a value class should be part of the name hash // of the value class (see the test `value-class-underlying`), this is accomplished // by adding the underlying type to the list of parent types. underlying :: ancestorTypes0 - } else + else ancestorTypes0 - } val apiBases = bases.map(apiType) @@ -306,13 +294,11 @@ private class ExtractAPICollector(using Context) extends ThunkHolder { val apiInherited = lzy(apiDefinitions(inherited).toArray) api.Structure.of(api.SafeLazy.strict(apiBases.toArray), api.SafeLazy.strict(apiDecls.toArray), apiInherited) - } - def linearizedAncestorTypes(info: ClassInfo): List[Type] = { + def linearizedAncestorTypes(info: ClassInfo): List[Type] = val ref = info.appliedRef // Note that the ordering of classes in `baseClasses` is important. info.baseClasses.tail.map(ref.baseType) - } // The hash generated by sbt for definitions is supposed to be symmetric so // we shouldn't have to sort them, but it actually isn't symmetric for @@ -320,26 +306,24 @@ private class ExtractAPICollector(using Context) extends ThunkHolder { // ensure a stable hash. // Modules and classes come first and are sorted by name, all other // definitions come later and are not sorted. 
- private object classFirstSort extends Ordering[Symbol] { - override def compare(a: Symbol, b: Symbol) = { + private object classFirstSort extends Ordering[Symbol]: + override def compare(a: Symbol, b: Symbol) = val aIsClass = a.isClass val bIsClass = b.isClass - if (aIsClass == bIsClass) { - if (aIsClass) { + if (aIsClass == bIsClass) + if (aIsClass) if (a.is(Module) == b.is(Module)) a.fullName.toString.compareTo(b.fullName.toString) else if (a.is(Module)) -1 else 1 - } else + else 0 - } else if (aIsClass) + else if (aIsClass) -1 - else + else 1 - } - } def apiDefinitions(defs: List[Symbol]): List[api.ClassDefinition] = defs.sorted(classFirstSort).map(apiDefinition(_, inlineOrigin = NoSymbol)) @@ -348,27 +332,25 @@ private class ExtractAPICollector(using Context) extends ThunkHolder { * currently hashing the body of. If it exists, include extra information * that is missing after erasure */ - def apiDefinition(sym: Symbol, inlineOrigin: Symbol): api.ClassDefinition = { - if (sym.isClass) { + def apiDefinition(sym: Symbol, inlineOrigin: Symbol): api.ClassDefinition = + if (sym.isClass) apiClass(sym.asClass) - } else if (sym.isType) { + else if (sym.isType) apiTypeMember(sym.asType) - } else if (sym.is(Mutable, butNot = Accessor)) { + else if (sym.is(Mutable, butNot = Accessor)) api.Var.of(sym.name.toString, apiAccess(sym), apiModifiers(sym), apiAnnotations(sym, inlineOrigin).toArray, apiType(sym.info)) - } else if (sym.isStableMember && !sym.isRealMethod) { + else if (sym.isStableMember && !sym.isRealMethod) api.Val.of(sym.name.toString, apiAccess(sym), apiModifiers(sym), apiAnnotations(sym, inlineOrigin).toArray, apiType(sym.info)) - } else { + else apiDef(sym.asTerm, inlineOrigin) - } - } /** `inlineOrigin` denotes an optional inline method that we are * currently hashing the body of. 
If it exists, include extra information * that is missing after erasure */ - def apiDef(sym: TermSymbol, inlineOrigin: Symbol): api.Def = { + def apiDef(sym: TermSymbol, inlineOrigin: Symbol): api.Def = var seenInlineExtras = false var inlineExtras = 41 @@ -380,9 +362,8 @@ private class ExtractAPICollector(using Context) extends ThunkHolder { def inlineExtrasAnnot: Option[api.Annotation] = val h = inlineExtras - Option.when(seenInlineExtras) { + Option.when(seenInlineExtras): marker(s"${MurmurHash3.finalizeHash(h, "inlineExtras".hashCode)}") - } def tparamList(pt: TypeLambda): List[api.TypeParameter] = pt.paramNames.lazyZip(pt.paramInfos).map((pname, pbounds) => @@ -396,7 +377,7 @@ private class ExtractAPICollector(using Context) extends ThunkHolder { param.name.toString, apiType(ptype), param.is(HasDefault), api.ParameterModifier.Plain)) api.ParameterList.of(apiParams.toArray, mt.isImplicitMethod) - def paramLists(t: Type, paramss: List[List[Symbol]]): List[api.ParameterList] = t match { + def paramLists(t: Type, paramss: List[List[Symbol]]): List[api.ParameterList] = t match case pt: TypeLambda => paramLists(pt.resultType, paramss.drop(1)) case mt @ MethodTpe(pnames, ptypes, restpe) => @@ -405,7 +386,6 @@ private class ExtractAPICollector(using Context) extends ThunkHolder { paramList(mt, paramss.head) :: paramLists(restpe, paramss.tail) case _ => Nil - } /** returns list of pairs of 1: the position in all parameter lists, and 2: a type parameter list */ def tparamLists(t: Type, index: Int): List[(Int, List[api.TypeParameter])] = t match @@ -427,17 +407,15 @@ private class ExtractAPICollector(using Context) extends ThunkHolder { val vparamss = paramLists(sym.info, sym.paramSymss) val retTp = sym.info.finalResultType.widenExpr - val tparamsExtraAnnot = Option.when(tparamsExtras.nonEmpty) { + val tparamsExtraAnnot = Option.when(tparamsExtras.nonEmpty): marker(s"${hashTparamsExtras(tparamsExtras)("tparamsExtra".hashCode)}") - } val annotations = inlineExtrasAnnot 
++: tparamsExtraAnnot ++: apiAnnotations(sym, inlineOrigin) api.Def.of(sym.zincMangledName.toString, apiAccess(sym), apiModifiers(sym), annotations.toArray, tparams.toArray, vparamss.toArray, apiType(retTp)) - } - def apiTypeMember(sym: TypeSymbol): api.TypeMember = { + def apiTypeMember(sym: TypeSymbol): api.TypeMember = val typeParams = Array[api.TypeParameter]() val name = sym.name.toString val access = apiAccess(sym) @@ -447,28 +425,24 @@ private class ExtractAPICollector(using Context) extends ThunkHolder { if (sym.isAliasType) api.TypeAlias.of(name, access, modifiers, as.toArray, typeParams, apiType(tpe.bounds.hi)) - else { + else assert(sym.isAbstractType) api.TypeDeclaration.of(name, access, modifiers, as.toArray, typeParams, apiType(tpe.bounds.lo), apiType(tpe.bounds.hi)) - } - } // Hack to represent dotty types which don't have an equivalent in xsbti - def combineApiTypes(apiTps: api.Type*): api.Type = { + def combineApiTypes(apiTps: api.Type*): api.Type = api.Structure.of(api.SafeLazy.strict(apiTps.toArray), api.SafeLazy.strict(Array()), api.SafeLazy.strict(Array())) - } - def apiType(tp: Type): api.Type = { + def apiType(tp: Type): api.Type = typeCache.getOrElseUpdate(tp, computeType(tp)) - } - private def computeType(tp: Type): api.Type = { + private def computeType(tp: Type): api.Type = // TODO: Never dealias. We currently have to dealias because // sbt main class discovery relies on the signature of the main // method being fully dealiased. 
See https://github.com/sbt/zinc/issues/102 val tp2 = if (!tp.isLambdaSub) tp.dealiasKeepAnnots else tp - tp2 match { + tp2 match case NoPrefix | NoType => Constants.emptyType case tp: NamedType => @@ -487,19 +461,17 @@ private class ExtractAPICollector(using Context) extends ThunkHolder { tp.prefix api.Projection.of(apiType(prefix), sym.name.toString) case AppliedType(tycon, args) => - def processArg(arg: Type): api.Type = arg match { + def processArg(arg: Type): api.Type = arg match case arg @ TypeBounds(lo, hi) => // Handle wildcard parameters if (lo.isDirectRef(defn.NothingClass) && hi.isDirectRef(defn.AnyClass)) Constants.emptyType - else { + else val name = "_" val ref = api.ParameterRef.of(name) api.Existential.of(ref, Array(apiTypeParameter(name, 0, lo, hi))) - } case _ => apiType(arg) - } val apiTycon = apiType(tycon) val apiArgs = args.map(processArg) @@ -512,21 +484,19 @@ private class ExtractAPICollector(using Context) extends ThunkHolder { val name = rt.refinedName.toString val parent = apiType(rt.parent) - def typeRefinement(name: String, tp: TypeBounds): api.TypeMember = tp match { + def typeRefinement(name: String, tp: TypeBounds): api.TypeMember = tp match case TypeAlias(alias) => api.TypeAlias.of(name, Constants.public, Constants.emptyModifiers, Array(), Array(), apiType(alias)) case TypeBounds(lo, hi) => api.TypeDeclaration.of(name, Constants.public, Constants.emptyModifiers, Array(), Array(), apiType(lo), apiType(hi)) - } - val decl = rt.refinedInfo match { + val decl = rt.refinedInfo match case rinfo: TypeBounds => typeRefinement(name, rinfo) case _ => report.debuglog(i"sbt-api: skipped structural refinement in $rt") null - } // Aggressive caching for RefinedTypes: `typeCache` is enough as long as two // RefinedType are `==`, but this is only the case when their `refinedInfo` @@ -588,24 +558,20 @@ private class ExtractAPICollector(using Context) extends ThunkHolder { val s = combineApiTypes(apiType(thistpe), apiType(supertpe)) 
withMarker(s, superMarker) case _ => { - internalError(i"Unhandled type $tp of class ${tp.getClass}") - Constants.emptyType - } - } - } + internalError(i"Unhandled type $tp of class ${tp.getClass}") + Constants.emptyType + } - def apiLazy(tp: => Type): api.Type = { + def apiLazy(tp: => Type): api.Type = // TODO: The sbt api needs a convenient way to make a lazy type. // For now, we repurpose Structure for this. val apiTp = lzy(Array(apiType(tp))) api.Structure.of(apiTp, api.SafeLazy.strict(Array()), api.SafeLazy.strict(Array())) - } - def apiThis(sym: Symbol): api.Singleton = { + def apiThis(sym: Symbol): api.Singleton = val pathComponents = sym.ownersIterator.takeWhile(!_.isEffectiveRoot) .map(s => api.Id.of(s.name.toString)) api.Singleton.of(api.Path.of(pathComponents.toArray.reverse ++ Array(Constants.thisPath))) - } def apiTypeParameter(tparam: ParamInfo): api.TypeParameter = apiTypeParameter(tparam.paramName.toString, tparam.paramVarianceSign, @@ -615,14 +581,13 @@ private class ExtractAPICollector(using Context) extends ThunkHolder { api.TypeParameter.of(name, Array(), Array(), apiVariance(variance), apiType(lo), apiType(hi)) - def apiVariance(v: Int): api.Variance = { + def apiVariance(v: Int): api.Variance = import api.Variance._ if (v < 0) Contravariant else if (v > 0) Covariant else Invariant - } - def apiAccess(sym: Symbol): api.Access = { + def apiAccess(sym: Symbol): api.Access = // Symbols which are private[foo] do not have the flag Private set, // but their `privateWithin` exists, see `Parsers#ParserCommon#normalize`. 
if (!sym.isOneOf(Protected | Private) && !sym.privateWithin.exists) @@ -631,7 +596,7 @@ private class ExtractAPICollector(using Context) extends ThunkHolder { Constants.privateLocal else if (sym.isAllOf(ProtectedLocal)) Constants.protectedLocal - else { + else val qualifier = if (sym.privateWithin eq NoSymbol) Constants.unqualified @@ -641,21 +606,18 @@ private class ExtractAPICollector(using Context) extends ThunkHolder { api.Protected.of(qualifier) else api.Private.of(qualifier) - } - } - def apiModifiers(sym: Symbol): api.Modifiers = { + def apiModifiers(sym: Symbol): api.Modifiers = val absOver = sym.is(AbsOverride) val abs = absOver || sym.isOneOf(Trait | Abstract | Deferred) val over = absOver || sym.is(Override) new api.Modifiers(abs, over, sym.is(Final), sym.is(Sealed), sym.isOneOf(GivenOrImplicit), sym.is(Lazy), sym.is(Macro), sym.isSuperAccessor) - } /** `inlineOrigin` denotes an optional inline method that we are * currently hashing the body of. */ - def apiAnnotations(s: Symbol, inlineOrigin: Symbol): List[api.Annotation] = { + def apiAnnotations(s: Symbol, inlineOrigin: Symbol): List[api.Annotation] = val annots = new mutable.ListBuffer[api.Annotation] val inlineBody = Inlines.bodyToInline(s) if !inlineBody.isEmpty then @@ -687,7 +649,6 @@ private class ExtractAPICollector(using Context) extends ThunkHolder { } annots.toList - } /** Produce a hash for a tree that is as stable as possible: * it should stay the same across compiler runs, compiler instances, @@ -816,7 +777,7 @@ private class ExtractAPICollector(using Context) extends ThunkHolder { private def hashInlineParam(p: Symbol, h: Int) = MurmurHash3.mix(p.name.toString.hashCode, MurmurHash3.mix(h, InlineParamHash)) - def apiAnnotation(annot: Annotation): api.Annotation = { + def apiAnnotation(annot: Annotation): api.Annotation = // Like with inline defs, the whole body of the annotation and not just its // type is part of its API so we need to store its hash, but Zinc wants us // to extract the 
annotation type and its arguments, so we use a dummy @@ -827,5 +788,3 @@ private class ExtractAPICollector(using Context) extends ThunkHolder { api.Annotation.of( apiType(annot.tree.tpe), // Used by sbt to find tests to run Array(api.AnnotationArgument.of("TREE_HASH", treeHash(annot.tree, inlineOrigin = NoSymbol).toString))) - } -} diff --git a/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala b/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala index fe5c8d061c78..75b46aee6a8d 100644 --- a/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala +++ b/compiler/src/dotty/tools/dotc/sbt/ExtractDependencies.scala @@ -46,17 +46,16 @@ import scala.collection.{Set, mutable} * * @see ExtractAPI */ -class ExtractDependencies extends Phase { +class ExtractDependencies extends Phase: import ExtractDependencies._ override def phaseName: String = ExtractDependencies.name override def description: String = ExtractDependencies.description - override def isRunnable(using Context): Boolean = { + override def isRunnable(using Context): Boolean = def forceRun = ctx.settings.YdumpSbtInc.value || ctx.settings.YforceSbtPhases.value super.isRunnable && (ctx.sbtCallback != null || forceRun) - } // Check no needed. Does not transform trees override def isCheckable: Boolean = false @@ -66,12 +65,12 @@ class ExtractDependencies extends Phase { // See the scripted test `constants` for an example where this matters. // TODO: Add a `Phase#runsBefore` method ? 
- override def run(using Context): Unit = { + override def run(using Context): Unit = val unit = ctx.compilationUnit val collector = new ExtractDependenciesCollector collector.traverse(unit.tpdTree) - if (ctx.settings.YdumpSbtInc.value) { + if (ctx.settings.YdumpSbtInc.value) val deps = collector.dependencies.map(_.toString).toArray[Object] val names = collector.usedNames.map { case (clazz, names) => s"$clazz: $names" }.toArray[Object] Arrays.sort(deps) @@ -79,7 +78,7 @@ class ExtractDependencies extends Phase { val pw = io.File(unit.source.file.jpath).changeExtension("inc").toFile.printWriter() // val pw = Console.out - try { + try pw.println("Used Names:") pw.println("===========") names.foreach(pw.println) @@ -87,37 +86,32 @@ class ExtractDependencies extends Phase { pw.println("Dependencies:") pw.println("=============") deps.foreach(pw.println) - } finally pw.close() - } + finally pw.close() - if (ctx.sbtCallback != null) { - collector.usedNames.foreach { + if (ctx.sbtCallback != null) + collector.usedNames.foreach: case (clazz, usedNames) => val className = classNameAsString(clazz) - usedNames.names.foreach { + usedNames.names.foreach: case (usedName, scopes) => ctx.sbtCallback.usedName(className, usedName.toString, scopes) - } - } collector.dependencies.foreach(recordDependency) - } - } /* * Handles dependency on given symbol by trying to figure out if represents a term * that is coming from either source code (not necessarily compiled in this compilation * run) or from class file and calls respective callback method. 
*/ - def recordDependency(dep: ClassDependency)(using Context): Unit = { + def recordDependency(dep: ClassDependency)(using Context): Unit = val fromClassName = classNameAsString(dep.from) val sourceFile = ctx.compilationUnit.source.file.file def binaryDependency(file: File, binaryClassName: String) = ctx.sbtCallback.binaryDependency(file, binaryClassName, fromClassName, sourceFile, dep.context) - def processExternalDependency(depFile: AbstractFile, binaryClassName: String) = { - depFile match { + def processExternalDependency(depFile: AbstractFile, binaryClassName: String) = + depFile match case ze: ZipArchive#Entry => // The dependency comes from a JAR ze.underlyingSource match case Some(zip) if zip.file != null => @@ -134,27 +128,21 @@ class ExtractDependencies extends Phase { binaryDependency(pf.file, binaryClassName) case _ => internalError(s"Ignoring dependency $depFile of unknown class ${depFile.getClass}}", dep.from.srcPos) - } - } val depFile = dep.to.associatedFile - if (depFile != null) { + if (depFile != null) // Cannot ignore inheritance relationship coming from the same source (see sbt/zinc#417) def allowLocal = dep.context == DependencyByInheritance || dep.context == LocalDependencyByInheritance - if (depFile.extension == "class") { + if (depFile.extension == "class") // Dependency is external -- source is undefined processExternalDependency(depFile, dep.to.binaryClassName) - } else if (allowLocal || depFile.file != sourceFile) { + else if (allowLocal || depFile.file != sourceFile) // We cannot ignore dependencies coming from the same source file because // the dependency info needs to propagate. See source-dependencies/trait-trait-211. 
val toClassName = classNameAsString(dep.to) ctx.sbtCallback.classDependency(toClassName, fromClassName, dep.context) - } - } - } -} -object ExtractDependencies { +object ExtractDependencies: val name: String = "sbt-deps" val description: String = "sends information on classes' dependencies to sbt" @@ -164,21 +152,19 @@ object ExtractDependencies { /** Report an internal error in incremental compilation. */ def internalError(msg: => String, pos: SrcPos = NoSourcePosition)(using Context): Unit = report.error(em"Internal error in the incremental compiler while compiling ${ctx.compilationUnit.source}: $msg", pos) -} private case class ClassDependency(from: Symbol, to: Symbol, context: DependencyContext) /** An object that maintain the set of used names from within a class */ -private final class UsedNamesInClass { +private final class UsedNamesInClass: private val _names = new mutable.HashMap[Name, EnumSet[UseScope]] def names: collection.Map[Name, EnumSet[UseScope]] = _names - def update(name: Name, scope: UseScope): Unit = { + def update(name: Name, scope: UseScope): Unit = val scopes = _names.getOrElseUpdate(name, EnumSet.noneOf(classOf[UseScope])) scopes.add(scope) - } - override def toString(): String = { + override def toString(): String = val builder = new StringBuilder names.foreach { case (name, scopes) => builder.append(name.mangledString) @@ -188,8 +174,6 @@ private final class UsedNamesInClass { builder.append(", ") } builder.toString() - } -} /** Extract the dependency information of a compilation unit. * @@ -218,21 +202,18 @@ private class ExtractDependenciesCollector extends tpd.TreeTraverser { thisTreeT * class/trait/object declared in the compilation unit. If none exists, issue warning. 
*/ private var _responsibleForImports: Symbol = _ - private def responsibleForImports(using Context) = { - def firstClassOrModule(tree: Tree) = { - val acc = new TreeAccumulator[Symbol] { + private def responsibleForImports(using Context) = + def firstClassOrModule(tree: Tree) = + val acc = new TreeAccumulator[Symbol]: def apply(x: Symbol, t: Tree)(using Context) = - t match { + t match case typeDef: TypeDef => typeDef.symbol case other => foldOver(x, other) - } - } acc(NoSymbol, tree) - } - if (_responsibleForImports == null) { + if (_responsibleForImports == null) val tree = ctx.compilationUnit.tpdTree _responsibleForImports = firstClassOrModule(tree) if (!_responsibleForImports.exists) @@ -240,9 +221,7 @@ private class ExtractDependenciesCollector extends tpd.TreeTraverser { thisTreeT |The incremental compiler cannot record the dependency information in such case. |Some errors like unused import referring to a non-existent class might not be reported. |""".stripMargin, tree.sourcePos) - } _responsibleForImports - } private var lastOwner: Symbol = _ private var lastDepSource: Symbol = _ @@ -251,60 +230,49 @@ private class ExtractDependenciesCollector extends tpd.TreeTraverser { thisTreeT * Resolves dependency source (that is, the closest non-local enclosing * class from a given `ctx.owner` */ - private def resolveDependencySource(using Context): Symbol = { - def nonLocalEnclosingClass = { + private def resolveDependencySource(using Context): Symbol = + def nonLocalEnclosingClass = var clazz = ctx.owner.enclosingClass var owner = clazz - while (!owner.is(PackageClass)) { - if (owner.isTerm) { + while (!owner.is(PackageClass)) + if (owner.isTerm) clazz = owner.enclosingClass owner = clazz - } else { + else owner = owner.owner - } - } clazz - } - if (lastOwner != ctx.owner) { + if (lastOwner != ctx.owner) lastOwner = ctx.owner val source = nonLocalEnclosingClass lastDepSource = if (source.is(PackageClass)) responsibleForImports else source - } lastDepSource - } - 
private def addUsedName(fromClass: Symbol, name: Name, scope: UseScope): Unit = { + private def addUsedName(fromClass: Symbol, name: Name, scope: UseScope): Unit = val usedName = _usedNames.getOrElseUpdate(fromClass, new UsedNamesInClass) usedName.update(name, scope) - } - private def addUsedName(name: Name, scope: UseScope)(using Context): Unit = { + private def addUsedName(name: Name, scope: UseScope)(using Context): Unit = val fromClass = resolveDependencySource - if (fromClass.exists) { // can happen when visiting imports + if (fromClass.exists) // can happen when visiting imports assert(fromClass.isClass) addUsedName(fromClass, name, scope) - } - } private def addMemberRefDependency(sym: Symbol)(using Context): Unit = - if (!ignoreDependency(sym)) { + if (!ignoreDependency(sym)) val enclOrModuleClass = if (sym.is(ModuleVal)) sym.moduleClass else sym.enclosingClass assert(enclOrModuleClass.isClass, s"$enclOrModuleClass, $sym") val fromClass = resolveDependencySource - if (fromClass.exists) { // can happen when visiting imports + if (fromClass.exists) // can happen when visiting imports assert(fromClass.isClass) addUsedName(fromClass, sym.zincMangledName, UseScope.Default) // packages have class symbol. 
Only record them as used names but not dependency - if (!sym.is(Package)) { + if (!sym.is(Package)) _dependencies += ClassDependency(fromClass, enclOrModuleClass, DependencyByMemberRef) - } - } - } private def addInheritanceDependencies(tree: Closure)(using Context): Unit = // If the tpt is empty, this is a non-SAM lambda, so no need to register @@ -314,12 +282,11 @@ private class ExtractDependenciesCollector extends tpd.TreeTraverser { thisTreeT _dependencies += ClassDependency(from, tree.tpt.tpe.classSymbol, LocalDependencyByInheritance) private def addInheritanceDependencies(tree: Template)(using Context): Unit = - if (tree.parents.nonEmpty) { + if (tree.parents.nonEmpty) val depContext = depContextOf(tree.symbol.owner) val from = resolveDependencySource for parent <- tree.parents do _dependencies += ClassDependency(from, parent.tpe.classSymbol, depContext) - } private def depContextOf(cls: Symbol)(using Context): DependencyContext = if cls.isLocal then LocalDependencyByInheritance @@ -341,18 +308,17 @@ private class ExtractDependenciesCollector extends tpd.TreeTraverser { thisTreeT /** Traverse the tree of a source file and record the dependencies and used names which * can be retrieved using `dependencies` and`usedNames`. 
*/ - override def traverse(tree: Tree)(using Context): Unit = try { - tree match { + override def traverse(tree: Tree)(using Context): Unit = try + tree match case Match(selector, _) => addPatMatDependency(selector.tpe) case Import(expr, selectors) => def lookupImported(name: Name) = expr.tpe.member(name).symbol - def addImported(name: Name) = { + def addImported(name: Name) = // importing a name means importing both a term and a type (if they exist) addMemberRefDependency(lookupImported(name.toTermName)) addMemberRefDependency(lookupImported(name.toTypeName)) - } for sel <- selectors if !sel.isWildcard do addImported(sel.name) if sel.rename != sel.name then @@ -381,9 +347,8 @@ private class ExtractDependenciesCollector extends tpd.TreeTraverser { thisTreeT case t: Template => addInheritanceDependencies(t) case _ => - } - tree match { + tree match case Inlined(call, _, _) if !call.isEmpty => // The inlined call is normally ignored by TreeTraverser but we need to // record it as a dependency @@ -397,12 +362,10 @@ private class ExtractDependenciesCollector extends tpd.TreeTraverser { thisTreeT t.body.foreach(traverse) case _ => traverseChildren(tree) - } - } catch { + catch case ex: AssertionError => println(i"asserted failed while traversing $tree") throw ex - } /** Traverse a used type and record all the dependencies we need to keep track * of for incremental recompilation. @@ -437,16 +400,16 @@ private class ExtractDependenciesCollector extends tpd.TreeTraverser { thisTreeT * The tests in sbt `types-in-used-names-a`, `types-in-used-names-b`, * `as-seen-from-a` and `as-seen-from-b` rely on this. 
*/ - private abstract class TypeDependencyTraverser(using Context) extends TypeTraverser() { + private abstract class TypeDependencyTraverser(using Context) extends TypeTraverser(): protected def addDependency(symbol: Symbol): Unit // Avoid cycles by remembering both the types (testcase: // tests/run/enum-values.scala) and the symbols of named types (testcase: // tests/pos-java-interop/i13575) we've seen before. val seen = new mutable.HashSet[Symbol | Type] - def traverse(tp: Type): Unit = if (!seen.contains(tp)) { + def traverse(tp: Type): Unit = if (!seen.contains(tp)) seen += tp - tp match { + tp match case tp: NamedType => val sym = tp.symbol if !seen.contains(sym) && !sym.is(Package) then @@ -462,25 +425,17 @@ private class ExtractDependenciesCollector extends tpd.TreeTraverser { thisTreeT traverse(tp.underlying) case _ => traverseChildren(tp) - } - } - } - def addTypeDependency(tpe: Type)(using Context): Unit = { - val traverser = new TypeDependencyTraverser { + def addTypeDependency(tpe: Type)(using Context): Unit = + val traverser = new TypeDependencyTraverser: def addDependency(symbol: Symbol) = addMemberRefDependency(symbol) - } traverser.traverse(tpe) - } - def addPatMatDependency(tpe: Type)(using Context): Unit = { - val traverser = new TypeDependencyTraverser { + def addPatMatDependency(tpe: Type)(using Context): Unit = + val traverser = new TypeDependencyTraverser: def addDependency(symbol: Symbol) = - if (!ignoreDependency(symbol) && symbol.is(Sealed)) { + if (!ignoreDependency(symbol) && symbol.is(Sealed)) val usedName = symbol.zincMangledName addUsedName(usedName, UseScope.PatMatTarget) - } - } traverser.traverse(tpe) - } } diff --git a/compiler/src/dotty/tools/dotc/sbt/ShowAPI.scala b/compiler/src/dotty/tools/dotc/sbt/ShowAPI.scala index cacb10cf98bc..3f77067e6ab2 100644 --- a/compiler/src/dotty/tools/dotc/sbt/ShowAPI.scala +++ b/compiler/src/dotty/tools/dotc/sbt/ShowAPI.scala @@ -15,15 +15,14 @@ import xsbti.api._ import scala.util.Try -object 
DefaultShowAPI { +object DefaultShowAPI: private lazy val defaultNesting = Try { java.lang.Integer.parseInt(sys.props.get("sbt.inc.apidiff.depth").get) } getOrElse 2 def apply(d: Definition): String = ShowAPI.showDefinition(d)(defaultNesting) def apply(d: Type): String = ShowAPI.showType(d)(defaultNesting) def apply(a: ClassLike): String = ShowAPI.showApi(a)(defaultNesting) -} -object ShowAPI { +object ShowAPI: private lazy val numDecls = Try { java.lang.Integer.parseInt(sys.props.get("sbt.inc.apidiff.decls").get) } getOrElse 0 private def truncateDecls(decls: Array[ClassDefinition]): Array[ClassDefinition] = if (numDecls <= 0) decls else decls.take(numDecls) @@ -32,7 +31,7 @@ object ShowAPI { def showApi(c: ClassLike)(implicit nesting: Int): String = showDefinition(c) - def showDefinition(d: Definition)(implicit nesting: Int): String = d match { + def showDefinition(d: Definition)(implicit nesting: Int): String = d match case v: Val => showMonoDef(v, "val") + ": " + showType(v.tpe) case v: Var => showMonoDef(v, "var") + ": " + showType(v.tpe) case d: Def => showPolyDef(d, "def") + showValueParams(d.valueParameters.toIndexedSeq) + ": " + showType(d.returnType) @@ -41,20 +40,18 @@ object ShowAPI { case cl: ClassLike => showMonoDef(d, showDefinitionType(cl.definitionType)) + showTypeParameters(cl.typeParameters.toIndexedSeq) + " extends " + showTemplate(cl) case cl: ClassLikeDef => showPolyDef(cl, showDefinitionType(cl.definitionType)) - } private def showTemplate(cl: ClassLike)(implicit nesting: Int) = if (nesting <= 0) "" - else { + else val showSelf = if (cl.selfType.isInstanceOf[EmptyType]) "" else " self: " + showNestedType(cl.selfType) + " =>" cl.structure.parents.map(showNestedType).mkString("", " with ", " {") + showSelf + lines(truncateDecls(cl.structure.inherited).toIndexedSeq.map(d => "^inherited^ " + showNestedDefinition(d))) + lines(truncateDecls(cl.structure.declared).toIndexedSeq.map(showNestedDefinition)) + "}" - } - def showType(t: Type)(implicit 
nesting: Int): String = t match { + def showType(t: Type)(implicit nesting: Int): String = t match case st: Projection => showType(st.prefix) + "#" + st.id case st: ParameterRef => "<" + st.id + ">" case st: Singleton => showPath(st.path) @@ -75,15 +72,13 @@ object ShowAPI { case p: Polymorphic => showType(p.baseType) + ( if (nesting <= 0) " [ ]" else showNestedTypeParameters(p.parameters.toIndexedSeq) - ) - } + ) private def showPath(p: Path): String = p.components.map(showPathComponent).mkString(".") - private def showPathComponent(pc: PathComponent) = pc match { + private def showPathComponent(pc: PathComponent) = pc match case s: Super => "super[" + showPath(s.qualifier) + "]" case _: This => "this" case i: Id => i.id - } private def space(s: String) = if (s.isEmpty) s else s + " " private def showMonoDef(d: Definition, label: String)(implicit nesting: Int): String = @@ -113,30 +108,26 @@ object ShowAPI { pl.parameters.map(mp => mp.name + ": " + showParameterModifier(showType(mp.tpe), mp.modifier) + (if (mp.hasDefault) "= ..." 
else "")).mkString(if (pl.isImplicit) "(implicit " else "(", ", ", ")")).mkString("") - private def showParameterModifier(base: String, pm: ParameterModifier): String = pm match { + private def showParameterModifier(base: String, pm: ParameterModifier): String = pm match case ParameterModifier.Plain => base case ParameterModifier.Repeated => base + "*" case ParameterModifier.ByName => "=> " + base - } - private def showDefinitionType(d: DefinitionType) = d match { + private def showDefinitionType(d: DefinitionType) = d match case DefinitionType.Trait => "trait" case DefinitionType.ClassDef => "class" case DefinitionType.Module => "object" case DefinitionType.PackageModule => "package object" - } - private def showAccess(a: Access) = a match { + private def showAccess(a: Access) = a match case p: Public => "" case p: Protected => "protected" + showQualifier(p.qualifier) case p: Private => "private" + showQualifier(p.qualifier) - } - private def showQualifier(q: Qualifier) = q match { + private def showQualifier(q: Qualifier) = q match case _: Unqualified => "" case _: ThisQualifier => "[this]" case i: IdQualifier => "[" + i.value + "]" - } private def showModifiers(m: Modifiers) = List( (m.isOverride, "override"), @@ -147,16 +138,14 @@ object ShowAPI { (m.isLazy, "lazy") ).collect { case (true, mod) => mod }.mkString(" ") - private def showVariance(v: Variance) = v match { + private def showVariance(v: Variance) = v match case Variance.Invariant => "" case Variance.Covariant => "+" case Variance.Contravariant => "-" - } // limit nesting to prevent cycles and generally keep output from getting humongous private def showNestedType(tp: Type)(implicit nesting: Int) = showType(tp)(nesting - 1) private def showNestedTypeParameter(tp: TypeParameter)(implicit nesting: Int) = showTypeParameter(tp)(nesting - 1) private def showNestedTypeParameters(tps: Seq[TypeParameter])(implicit nesting: Int) = showTypeParameters(tps)(nesting - 1) private def showNestedDefinition(d: 
Definition)(implicit nesting: Int) = showDefinition(d)(nesting - 1) -} diff --git a/compiler/src/dotty/tools/dotc/sbt/ThunkHolder.scala b/compiler/src/dotty/tools/dotc/sbt/ThunkHolder.scala index 60aa76c91ed4..232621d9fa51 100644 --- a/compiler/src/dotty/tools/dotc/sbt/ThunkHolder.scala +++ b/compiler/src/dotty/tools/dotc/sbt/ThunkHolder.scala @@ -9,24 +9,21 @@ import xsbti.api /** Create and hold thunks. A thunk is a (potentially) unevaluated value * that may be evaluated once. */ -private[sbt] trait ThunkHolder { +private[sbt] trait ThunkHolder: private val thunks = new ListBuffer[api.Lazy[?]] /** Force all unevaluated thunks to prevent space leaks. */ - @tailrec protected final def forceThunks(): Unit = if (!thunks.isEmpty) { + @tailrec protected final def forceThunks(): Unit = if (!thunks.isEmpty) val toForce = thunks.toList thunks.clear() toForce.foreach(_.get()) // Forcing thunks may create new thunks forceThunks() - } /** Store the by-name parameter `s` in a `Lazy` container without evaluating it. 
* It will be forced by the next call to `forceThunks()` */ - def lzy[T <: AnyRef](t: => T): api.Lazy[T] = { + def lzy[T <: AnyRef](t: => T): api.Lazy[T] = val l = api.SafeLazy.apply(() => t).nn thunks += l l - } -} diff --git a/compiler/src/dotty/tools/dotc/semanticdb/ConstantOps.scala b/compiler/src/dotty/tools/dotc/semanticdb/ConstantOps.scala index 975d5480fe9b..7d2d004c7689 100644 --- a/compiler/src/dotty/tools/dotc/semanticdb/ConstantOps.scala +++ b/compiler/src/dotty/tools/dotc/semanticdb/ConstantOps.scala @@ -9,7 +9,7 @@ import core.Constants._ object ConstantOps: extension (const: Constant) - def toSemanticConst(using Context): s.Constant = const.tag match { + def toSemanticConst(using Context): s.Constant = const.tag match case UnitTag => s.UnitConstant() case BooleanTag => s.BooleanConstant(const.booleanValue) case ByteTag => s.ByteConstant(const.byteValue) @@ -22,4 +22,3 @@ object ConstantOps: case StringTag => s.StringConstant(const.stringValue) case NullTag => s.NullConstant() case _ => throw new Error(s"Constant ${const} can't be converted to Semanticdb Constant.") - } diff --git a/compiler/src/dotty/tools/dotc/semanticdb/Descriptor.scala b/compiler/src/dotty/tools/dotc/semanticdb/Descriptor.scala index 2743e37cb79f..80431b9dffa0 100644 --- a/compiler/src/dotty/tools/dotc/semanticdb/Descriptor.scala +++ b/compiler/src/dotty/tools/dotc/semanticdb/Descriptor.scala @@ -5,103 +5,89 @@ import scala.language.unsafeNulls import java.lang.System.{lineSeparator => EOL} import dotty.tools.dotc.semanticdb.{Descriptor => d} -class DescriptorParser(s: String) { +class DescriptorParser(s: String): var i = s.length - def fail() = { + def fail() = val message = "invalid symbol format" val caret = " " * i + "^" sys.error(s"$message$EOL$s$EOL$caret") - } val BOF = '\u0000' val EOF = '\u001A' var currChar = EOF - def readChar(): Char = { - if (i <= 0) { - if (i == 0) { + def readChar(): Char = + if (i <= 0) + if (i == 0) i -= 1 currChar = BOF currChar - } else { + else 
fail() - } - } else { + else i -= 1 currChar = s(i) currChar - } - } - def parseValue(): String = { - if (currChar == '`') { + def parseValue(): String = + if (currChar == '`') val end = i while (readChar() != '`') {} readChar() s.substring(i + 2, end) - } else { + else val end = i + 1 if (!Character.isJavaIdentifierPart(currChar)) fail() while (Character.isJavaIdentifierPart(readChar()) && currChar != BOF) {} s.substring(i + 1, end) - } - } - def parseDisambiguator(): String = { + def parseDisambiguator(): String = val end = i + 1 if (currChar != ')') fail() while (readChar() != '(') {} readChar() s.substring(i + 1, end) - } - def parseDescriptor(): Descriptor = { - if (currChar == '.') { + def parseDescriptor(): Descriptor = + if (currChar == '.') readChar() - if (currChar == ')') { + if (currChar == ')') val disambiguator = parseDisambiguator() val value = parseValue() d.Method(value, disambiguator) - } else { + else d.Term(parseValue()) - } - } else if (currChar == '#') { + else if (currChar == '#') readChar() d.Type(parseValue()) - } else if (currChar == '/') { + else if (currChar == '/') readChar() d.Package(parseValue()) - } else if (currChar == ')') { + else if (currChar == ')') readChar() val value = parseValue() if (currChar != '(') fail() else readChar() d.Parameter(value) - } else if (currChar == ']') { + else if (currChar == ']') readChar() val value = parseValue() if (currChar != '[') fail() else readChar() d.TypeParameter(value) - } else { + else fail() - } - } - def entryPoint(): (Descriptor, String) = { + def entryPoint(): (Descriptor, String) = readChar() val desc = parseDescriptor() (desc, s.substring(0, i + 1)) - } -} -object DescriptorParser { - def apply(symbol: String): (Descriptor, String) = { +object DescriptorParser: + def apply(symbol: String): (Descriptor, String) = val parser = new DescriptorParser(symbol) parser.entryPoint() - } -} -sealed trait Descriptor { +sealed trait Descriptor: def isNone: Boolean = this == d.None def isTerm: 
Boolean = this.isInstanceOf[d.Term] def isMethod: Boolean = this.isInstanceOf[d.Method] @@ -110,8 +96,7 @@ sealed trait Descriptor { def isParameter: Boolean = this.isInstanceOf[d.Parameter] def isTypeParameter: Boolean = this.isInstanceOf[d.TypeParameter] def value: String -} -object Descriptor { +object Descriptor: case object None extends Descriptor { def value: String = "" } final case class Term(value: String) extends Descriptor final case class Method(value: String, disambiguator: String) extends Descriptor @@ -119,4 +104,3 @@ object Descriptor { final case class Package(value: String) extends Descriptor final case class Parameter(value: String) extends Descriptor final case class TypeParameter(value: String) extends Descriptor -} diff --git a/compiler/src/dotty/tools/dotc/semanticdb/ExtractSemanticDB.scala b/compiler/src/dotty/tools/dotc/semanticdb/ExtractSemanticDB.scala index 91614aaccad2..70c7b36f5edb 100644 --- a/compiler/src/dotty/tools/dotc/semanticdb/ExtractSemanticDB.scala +++ b/compiler/src/dotty/tools/dotc/semanticdb/ExtractSemanticDB.scala @@ -151,7 +151,7 @@ class ExtractSemanticDB extends Phase: case tree => registerDefinition(tree.symbol, tree.span, Set.empty, tree.source) case tree: NamedDefTree => if !tree.symbol.isAllOf(ModuleValCreationFlags) then - tree match { + tree match case tree: ValDef if tree.symbol.isAllOf(EnumValue) => tree.rhs match case Block(TypeDef(_, template: Template) :: _, _) => // simple case with specialised extends clause @@ -182,7 +182,6 @@ class ExtractSemanticDB extends Phase: case tree => if !excludeChildren(tree.symbol) then traverseChildren(tree) - } if !excludeDef(tree.symbol) && (tree.span.hasLength || tree.symbol.isAnonymousClass) then registerDefinition(tree.symbol, tree.nameSpan, symbolKinds(tree), tree.source) val privateWithin = tree.symbol.privateWithin @@ -312,11 +311,11 @@ class ExtractSemanticDB extends Phase: def impl(acc: List[Tree], pats: List[Tree]): List[Tree] = pats match case pat::pats => pat 
match - case Typed(UnApply(fun: Tree, _, args), tpt: Tree) => impl(fun::tpt::acc, args:::pats) - case Typed(obj: Ident, tpt: Tree) => impl(obj::tpt::acc, pats) - case UnApply(fun: Tree, _, args) => impl(fun::acc, args:::pats) - case obj: Ident => impl(obj::acc, pats) - case _ => impl(acc, pats) + case Typed(UnApply(fun: Tree, _, args), tpt: Tree) => impl(fun::tpt::acc, args:::pats) + case Typed(obj: Ident, tpt: Tree) => impl(obj::tpt::acc, pats) + case UnApply(fun: Tree, _, args) => impl(fun::acc, args:::pats) + case obj: Ident => impl(obj::acc, pats) + case _ => impl(acc, pats) case Nil => acc diff --git a/compiler/src/dotty/tools/dotc/semanticdb/PPrint.scala b/compiler/src/dotty/tools/dotc/semanticdb/PPrint.scala index b53ee787f501..4738bd55cca3 100644 --- a/compiler/src/dotty/tools/dotc/semanticdb/PPrint.scala +++ b/compiler/src/dotty/tools/dotc/semanticdb/PPrint.scala @@ -24,10 +24,9 @@ class SymbolInformationPrinter (symtab: PrinterSymtab): def visit(sym: String): SymbolInformation = val symtabInfo = noteSymtab.get(sym).orElse(symtab.info(sym)) - symtabInfo.getOrElse { + symtabInfo.getOrElse: val displayName = if sym.isGlobal then sym.desc.value else sym SymbolInformation(symbol = sym, displayName = displayName) - } end InfoNotes class InfoPrinter(notes: InfoNotes): @@ -104,11 +103,10 @@ class SymbolInformationPrinter (symtab: PrinterSymtab): sb.append(tparams.infos.map(pprintDef).mkString("[", ", ", "] ")) if (parents.nonEmpty) sb.append(parents.map(pprint).mkString("extends ", " with ", " ")) - if (self.isDefined || decls.infos.nonEmpty) { + if (self.isDefined || decls.infos.nonEmpty) val selfStr = if (self.isDefined) s"self: ${pprint(self)} =>" else "" val declsStr = if (decls.infos.nonEmpty) s"+${decls.infos.length} decls" else "" sb.append(s"{ ${selfStr} ${declsStr} }") - } sb.toString case MethodSignature(tparams, paramss, res) => val sb = new StringBuilder() @@ -124,9 +122,9 @@ class SymbolInformationPrinter (symtab: PrinterSymtab): val sb = new 
StringBuilder() if (tparams.infos.nonEmpty) sb.append(tparams.infos.map(pprintDef).mkString("[", ", ", "]")) - if (lo == hi) { + if (lo == hi) sb.append(s" = ${pprint(lo)}") - } else { + else lo match case TypeRef(Type.Empty, "scala/Nothing#", Nil) => () case lo => sb.append(s" >: ${pprint(lo)}") @@ -134,31 +132,28 @@ class SymbolInformationPrinter (symtab: PrinterSymtab): case TypeRef(Type.Empty, "scala/Any#", Nil) => () case TypeRef(Type.Empty, "java/lang/Object#", Nil) => () case hi => sb.append(s" <: ${pprint(hi)}") - } sb.toString case ValueSignature(tpe) => pprint(tpe) case _ => "" - protected def pprint(tpe: Type): String = { + protected def pprint(tpe: Type): String = def prefix(tpe: Type): String = tpe match case TypeRef(pre, sym, args) => - val preStr = pre match { + val preStr = pre match case _: SingleType | _: ThisType | _: SuperType => s"${prefix(pre)}." case Type.Empty => "" case _ => s"${prefix(pre)}#" - } val argsStr = if (args.nonEmpty) args.map(normal).mkString("[", ", ", "]") else "" s"${preStr}${pprintRef(sym)}${argsStr}" case SingleType(pre, sym) => - pre match { + pre match case Type.Empty => pprintRef(sym) case _ => s"${prefix(pre)}.${pprintRef(sym)}" - } case ThisType(sym) => s"${pprintRef(sym)}.this" case SuperType(pre, sym) => @@ -209,15 +204,13 @@ class SymbolInformationPrinter (symtab: PrinterSymtab): case _ => prefix(tpe) normal(tpe) - } private def pprint(ann: Annotation): String = - ann.tpe match { + ann.tpe match case Type.Empty => s"@" case tpe => s"@${pprint(tpe)}" - } - protected def pprint(const: Constant): String = const match { + protected def pprint(const: Constant): String = const match case Constant.Empty => "" case UnitConstant() => @@ -244,7 +237,6 @@ class SymbolInformationPrinter (symtab: PrinterSymtab): "\"" + value + "\"" case NullConstant() => "null" - } private def accessString(access: Access): String = access match @@ -266,32 +258,28 @@ class SymbolInformationPrinter (symtab: PrinterSymtab): scope.hardlinks.toList 
extension (scope: Option[Scope]) - private def infos: List[SymbolInformation] = scope match { + private def infos: List[SymbolInformation] = scope match case Some(s) => s.infos case None => Nil - } end InfoPrinter end SymbolInformationPrinter extension (info: SymbolInformation) - def prefixBeforeTpe: String = { - info.kind match { + def prefixBeforeTpe: String = + info.kind match case LOCAL | FIELD | PARAMETER | SELF_PARAMETER | UNKNOWN_KIND | Unrecognized(_) => ": " case METHOD | CONSTRUCTOR | MACRO | TYPE | TYPE_PARAMETER | OBJECT | PACKAGE | PACKAGE_OBJECT | CLASS | TRAIT | INTERFACE => " " - } - } trait PrinterSymtab: def info(symbol: String): Option[SymbolInformation] object PrinterSymtab: def fromTextDocument(doc: TextDocument): PrinterSymtab = val map = doc.symbols.map(info => (info.symbol, info)).toMap - new PrinterSymtab { + new PrinterSymtab: override def info(symbol: String): Option[SymbolInformation] = map.get(symbol) - } def processRange(sb: StringBuilder, range: Range): Unit = sb.append('[') @@ -353,7 +341,7 @@ class SyntheticPrinter(symtab: PrinterSymtab, source: SourceFile) extends Symbol } private def processTree(tree: Tree)(using sb: StringBuilder): Unit = - tree match { + tree match case tree: ApplyTree => processTree(tree.function) sb.append("(") @@ -395,7 +383,6 @@ class SyntheticPrinter(symtab: PrinterSymtab, source: SourceFile) extends Symbol case _ => sb.append("") - } end SyntheticPrinter diff --git a/compiler/src/dotty/tools/dotc/semanticdb/Scala3.scala b/compiler/src/dotty/tools/dotc/semanticdb/Scala3.scala index e157b52fe260..f0b4962511d4 100644 --- a/compiler/src/dotty/tools/dotc/semanticdb/Scala3.scala +++ b/compiler/src/dotty/tools/dotc/semanticdb/Scala3.scala @@ -59,9 +59,8 @@ object Scala3: // println(name.mangledString) nameInSource == name.mangledString - sealed trait FakeSymbol { + sealed trait FakeSymbol: private[Scala3] var sname: Option[String] = None - } /** Fake symbol that represents wildcard symbol which will be converted 
to * semanticdb symbol with @@ -88,11 +87,10 @@ object Scala3: sym match case s: Symbol => builder.symbolName(s) case s: FakeSymbol => - s.sname.getOrElse { + s.sname.getOrElse: val sname = builder.symbolName(s) s.sname = Some(sname) sname - } def symbolInfo(symkinds: Set[SymbolKind])(using LinkMode, TypeOps, SemanticSymbolBuilder, Context): SymbolInformation = sym match @@ -100,10 +98,9 @@ object Scala3: val kind = s.symbolKind(symkinds) val sname = sym.symbolName val signature = s.info.toSemanticSig(s) - val symbolAnnotations = s.annotations.collect{ + val symbolAnnotations = s.annotations.collect: case annot if annot.symbol != defn.BodyAnnot && annot.symbol != defn.ChildAnnot => Annotation(annot.tree.tpe.toSemanticType(annot.symbol)) - } SymbolInformation( symbol = sname, language = Language.SCALA, @@ -153,7 +150,7 @@ object Scala3: displayName = s.name.show.unescapeUnicode, properties = SymbolInformation.Property.ABSTRACT.value, - signature = signature, + signature = signature, ) end SemanticSymbolOps diff --git a/compiler/src/dotty/tools/dotc/semanticdb/TypeOps.scala b/compiler/src/dotty/tools/dotc/semanticdb/TypeOps.scala index b0d032c7d83b..4dcc438d7897 100644 --- a/compiler/src/dotty/tools/dotc/semanticdb/TypeOps.scala +++ b/compiler/src/dotty/tools/dotc/semanticdb/TypeOps.scala @@ -66,7 +66,7 @@ class TypeOps: fakeSymbols.add(sym) extension (tpe: Type) - def lookupSym(name: Name)(using Context): Option[Symbol] = { + def lookupSym(name: Name)(using Context): Option[Symbol] = def loop(ty: Type): Option[Symbol] = ty match case rt: RefinedType => refinementSymtab.lookup(rt, name).orElse( @@ -81,11 +81,10 @@ class TypeOps: case _ => None loop(tpe.dealias) - } def toSemanticSig(using LinkMode, Context, SemanticSymbolBuilder)(sym: Symbol): s.Signature = def enterParamRef(tpe: Type): Unit = - tpe match { + tpe match case lam: LambdaType => // Find the "actual" binder type for nested LambdaType // For example, `def foo(x: T)(y: T): T` and for `.owner.info` would be 
like @@ -119,10 +118,9 @@ class TypeOps: enterParamRef(tb.hi) case _ => () - } def enterRefined(tpe: Type): Unit = - tpe match { + tpe match case refined: RefinedType => val key = (refined, sym.name) refinementSymtab(key) = sym @@ -168,35 +166,31 @@ class TypeOps: enterRefined(t1) enterRefined(t2) case _ => () - } if sym.exists && sym.owner.exists then enterParamRef(sym.owner.info) enterRefined(sym.owner.info) - def loop(tpe: Type): s.Signature = tpe match { + def loop(tpe: Type): s.Signature = tpe match case mp: MethodOrPoly => def flatten( t: Type, paramss: List[List[SemanticSymbol]], tparams: List[SemanticSymbol] - ): (Type, List[List[SemanticSymbol]], List[SemanticSymbol]) = t match { + ): (Type, List[List[SemanticSymbol]], List[SemanticSymbol]) = t match case mt: MethodType => val syms: List[SemanticSymbol] = mt.paramNames.zip(mt.paramInfos).map { (name, info) => - paramRefSymtab.lookup(mt, name).getOrElse { + paramRefSymtab.lookup(mt, name).getOrElse: TermParamRefSymbol(sym, name, info).tap(registerFakeSymbol) - } } flatten(mt.resType, paramss :+ syms, tparams) case pt: PolyType => val syms: List[SemanticSymbol] = pt.paramNames.zip(pt.paramInfos).map { (name, info) => - paramRefSymtab.lookup(pt, name).getOrElse { + paramRefSymtab.lookup(pt, name).getOrElse: TypeParamRefSymbol(sym, name, info).tap(registerFakeSymbol) - } } flatten(pt.resType, paramss, tparams ++ syms) case other => (other, paramss, tparams) - } val (resType, paramss, tparams) = flatten(mp, Nil, Nil) val sparamss = paramss.map(_.sscope) @@ -216,20 +210,18 @@ class TypeOps: case TypeBounds(lo, hi) => // for `type X[T] = T` is equivalent to `[T] =>> T` - def tparams(tpe: Type): (Type, List[SemanticSymbol]) = tpe match { + def tparams(tpe: Type): (Type, List[SemanticSymbol]) = tpe match case lambda: HKTypeLambda => val paramSyms: List[SemanticSymbol] = lambda.paramNames.zip(lambda.paramInfos).map { (paramName, bounds) => // def x[T[_]] = ??? 
if paramName.isWildcard then WildcardTypeSymbol(sym, bounds).tap(registerFakeSymbol) else - paramRefSymtab.lookup(lambda, paramName).getOrElse { + paramRefSymtab.lookup(lambda, paramName).getOrElse: TypeParamRefSymbol(sym, paramName, bounds).tap(registerFakeSymbol) - } } (lambda.resType, paramSyms) case _ => (tpe, Nil) - } val (loRes, loParams) = tparams(lo) val (hiRes, hiParams) = tparams(hi) val stparams = (loParams ++ hiParams).distinctBy(_.name).sscopeOpt @@ -241,12 +233,11 @@ class TypeOps: s.ValueSignature( other.toSemanticType(sym) ) - } loop(tpe) def toSemanticType(sym: Symbol)(using LinkMode, SemanticSymbolBuilder, Context): s.Type = import ConstantOps._ - def loop(tpe: Type): s.Type = tpe match { + def loop(tpe: Type): s.Type = tpe match case t if t.isFromJavaObject => loop(defn.AnyType) case ExprType(tpe) => @@ -339,14 +330,13 @@ class TypeOps: val key = (lam, paramName) paramRefSymtab.get(key) }.sscope - lam.resType match { + lam.resType match case defn.MatchCase(key, body) => s.MatchType.CaseType( loop(key), loop(body) ) case _ => s.MatchType.CaseType() // shouldn't happen - } case defn.MatchCase(key, body) => val skey = loop(key) val sbody = loop(body) @@ -368,30 +358,27 @@ class TypeOps: // refinedInfo = TypeRef(..., Int) // ) type RefinedInfo = (core.Names.Name, Type) - def flatten(tpe: Type, acc: List[RefinedInfo]): (Type, List[RefinedInfo]) = tpe match { + def flatten(tpe: Type, acc: List[RefinedInfo]): (Type, List[RefinedInfo]) = tpe match case RefinedType(parent, name, info) => flatten(parent, acc :+ (name, info)) case _ => (tpe, acc) - } // flatten parent types to list // e.g. `X with Y with Z { refined }` // RefinedType(parent = AndType(X, AndType(Y, Z)), ...) 
// => List(X, Y, Z) - def flattenParent(parent: Type): List[s.Type] = parent match { + def flattenParent(parent: Type): List[s.Type] = parent match case AndType(tp1, tp2) => flattenParent(tp1) ++ flattenParent(tp2) case _ => List(loop(parent)) - } val (parent, refinedInfos) = flatten(rt, List.empty) val stpe = s.IntersectionType(flattenParent(parent)) val decls: List[SemanticSymbol] = refinedInfos.map { (name, info) => - refinementSymtab.lookup(rt, name).getOrElse { + refinementSymtab.lookup(rt, name).getOrElse: RefinementSymbol(sym, name, info).tap(registerFakeSymbol) - } } val sdecls = decls.sscopeOpt(using LinkMode.HardlinkChildren) s.StructuralType(stpe, sdecls) @@ -489,9 +476,8 @@ class TypeOps: if paramName.isWildcard then WildcardTypeSymbol(sym, bounds).tap(registerFakeSymbol) else - paramRefSymtab.lookup(lambda, paramName).getOrElse { + paramRefSymtab.lookup(lambda, paramName).getOrElse: TypeParamRefSymbol(sym, paramName, bounds).tap(registerFakeSymbol) - } } val parameters = paramSyms.sscopeOpt(using LinkMode.HardlinkChildren) @@ -506,14 +492,13 @@ class TypeOps: case _ => s.Type.Empty - } loop(tpe) /** Return true if the prefix is like `_root_.this` */ private def hasTrivialPrefix(using Context): Boolean = def checkTrivialPrefix(pre: Type, sym: Symbol)(using Context): Boolean = pre =:= sym.owner.thisType - tpe match { + tpe match case TypeRef(pre, sym: Symbol) => checkTrivialPrefix(pre, sym) case tr @ TypeRef(pre, _) if tr.symbol != NoSymbol => @@ -523,7 +508,6 @@ class TypeOps: case tr @ TermRef(pre, _) if tr.symbol != NoSymbol => checkTrivialPrefix(pre, tr.symbol) case _ => false - } object SymbolScopeOps: diff --git a/compiler/src/dotty/tools/dotc/semanticdb/generated/Access.scala b/compiler/src/dotty/tools/dotc/semanticdb/generated/Access.scala index c646e67b69ad..f71eece0e58c 100644 --- a/compiler/src/dotty/tools/dotc/semanticdb/generated/Access.scala +++ b/compiler/src/dotty/tools/dotc/semanticdb/generated/Access.scala @@ -8,22 +8,21 @@ package 
dotty.tools.dotc.semanticdb import dotty.tools.dotc.semanticdb.internal._ import scala.annotation.internal.sharable -sealed trait Access extends SemanticdbGeneratedSealedOneof derives CanEqual { +sealed trait Access extends SemanticdbGeneratedSealedOneof derives CanEqual: type MessageType = dotty.tools.dotc.semanticdb.AccessMessage final def isEmpty = this.isInstanceOf[dotty.tools.dotc.semanticdb.Access.Empty.type] final def isDefined = !isEmpty final def asMessage: dotty.tools.dotc.semanticdb.AccessMessage = dotty.tools.dotc.semanticdb.Access.AccessTypeMapper.toBase(this) final def asNonEmpty: Option[dotty.tools.dotc.semanticdb.Access.NonEmpty] = if (isEmpty) None else Some(this.asInstanceOf[dotty.tools.dotc.semanticdb.Access.NonEmpty]) -} -object Access { +object Access: case object Empty extends dotty.tools.dotc.semanticdb.Access sealed trait NonEmpty extends dotty.tools.dotc.semanticdb.Access def defaultInstance: dotty.tools.dotc.semanticdb.Access = Empty - implicit val AccessTypeMapper: SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.AccessMessage, dotty.tools.dotc.semanticdb.Access] = new SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.AccessMessage, dotty.tools.dotc.semanticdb.Access] { - override def toCustom(__base: dotty.tools.dotc.semanticdb.AccessMessage): dotty.tools.dotc.semanticdb.Access = __base.sealedValue match { + implicit val AccessTypeMapper: SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.AccessMessage, dotty.tools.dotc.semanticdb.Access] = new SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.AccessMessage, dotty.tools.dotc.semanticdb.Access]: + override def toCustom(__base: dotty.tools.dotc.semanticdb.AccessMessage): dotty.tools.dotc.semanticdb.Access = __base.sealedValue match case __v: dotty.tools.dotc.semanticdb.AccessMessage.SealedValue.PrivateAccess => __v.value case __v: dotty.tools.dotc.semanticdb.AccessMessage.SealedValue.PrivateThisAccess => __v.value case __v: 
dotty.tools.dotc.semanticdb.AccessMessage.SealedValue.PrivateWithinAccess => __v.value @@ -32,7 +31,6 @@ object Access { case __v: dotty.tools.dotc.semanticdb.AccessMessage.SealedValue.ProtectedWithinAccess => __v.value case __v: dotty.tools.dotc.semanticdb.AccessMessage.SealedValue.PublicAccess => __v.value case dotty.tools.dotc.semanticdb.AccessMessage.SealedValue.Empty => Empty - } override def toBase(__custom: dotty.tools.dotc.semanticdb.Access): dotty.tools.dotc.semanticdb.AccessMessage = dotty.tools.dotc.semanticdb.AccessMessage(__custom match { case __v: dotty.tools.dotc.semanticdb.PrivateAccess => dotty.tools.dotc.semanticdb.AccessMessage.SealedValue.PrivateAccess(__v) case __v: dotty.tools.dotc.semanticdb.PrivateThisAccess => dotty.tools.dotc.semanticdb.AccessMessage.SealedValue.PrivateThisAccess(__v) @@ -43,15 +41,13 @@ object Access { case __v: dotty.tools.dotc.semanticdb.PublicAccess => dotty.tools.dotc.semanticdb.AccessMessage.SealedValue.PublicAccess(__v) case Empty => dotty.tools.dotc.semanticdb.AccessMessage.SealedValue.Empty }) - } -} @SerialVersionUID(0L) final case class AccessMessage( sealedValue: dotty.tools.dotc.semanticdb.AccessMessage.SealedValue = dotty.tools.dotc.semanticdb.AccessMessage.SealedValue.Empty - ) extends SemanticdbGeneratedMessage derives CanEqual { + ) extends SemanticdbGeneratedMessage derives CanEqual: @transient @sharable private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private[this] def __computeSerializedSize(): _root_.scala.Int = var __size = 0 if (sealedValue.privateAccess.isDefined) { val __value = sealedValue.privateAccess.get @@ -82,17 +78,14 @@ final case class AccessMessage( __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize }; __size - } - override def serializedSize: _root_.scala.Int = { + override def serializedSize: _root_.scala.Int = var __size = 
__serializedSizeMemoized - if (__size == 0) { + if (__size == 0) __size = __computeSerializedSize() + 1 __serializedSizeMemoized = __size - } __size - 1 - } - def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { + def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = sealedValue.privateAccess.foreach { __v => val __m = __v _output__.writeTag(1, 2) @@ -135,7 +128,6 @@ final case class AccessMessage( _output__.writeUInt32NoTag(__m.serializedSize) __m.writeTo(_output__) }; - } def getPrivateAccess: dotty.tools.dotc.semanticdb.PrivateAccess = sealedValue.privateAccess.getOrElse(dotty.tools.dotc.semanticdb.PrivateAccess.defaultInstance) def withPrivateAccess(__v: dotty.tools.dotc.semanticdb.PrivateAccess): AccessMessage = copy(sealedValue = dotty.tools.dotc.semanticdb.AccessMessage.SealedValue.PrivateAccess(__v)) def getPrivateThisAccess: dotty.tools.dotc.semanticdb.PrivateThisAccess = sealedValue.privateThisAccess.getOrElse(dotty.tools.dotc.semanticdb.PrivateThisAccess.defaultInstance) @@ -158,16 +150,15 @@ final case class AccessMessage( def toAccess: dotty.tools.dotc.semanticdb.Access = dotty.tools.dotc.semanticdb.Access.AccessTypeMapper.toCustom(this) // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.Access]) -} -object AccessMessage extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.AccessMessage] { +object AccessMessage extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.AccessMessage]: implicit def messageCompanion: SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.AccessMessage] = this - def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.AccessMessage = { + def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.AccessMessage = var __sealedValue: dotty.tools.dotc.semanticdb.AccessMessage.SealedValue = dotty.tools.dotc.semanticdb.AccessMessage.SealedValue.Empty var _done__ = false - while (!_done__) { + 
while (!_done__) val _tag__ = _input__.readTag() - _tag__ match { + _tag__ match case 0 => _done__ = true case 10 => __sealedValue = dotty.tools.dotc.semanticdb.AccessMessage.SealedValue.PrivateAccess(__sealedValue.privateAccess.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.PrivateAccess](_input__))(LiteParser.readMessage(_input__, _))) @@ -184,12 +175,9 @@ object AccessMessage extends SemanticdbGeneratedMessageCompanion[dotty.tools.do case 58 => __sealedValue = dotty.tools.dotc.semanticdb.AccessMessage.SealedValue.PublicAccess(__sealedValue.publicAccess.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.PublicAccess](_input__))(LiteParser.readMessage(_input__, _))) case tag => _input__.skipField(tag) - } - } dotty.tools.dotc.semanticdb.AccessMessage( sealedValue = __sealedValue ) - } @@ -199,7 +187,7 @@ object AccessMessage extends SemanticdbGeneratedMessageCompanion[dotty.tools.do lazy val defaultInstance = dotty.tools.dotc.semanticdb.AccessMessage( sealedValue = dotty.tools.dotc.semanticdb.AccessMessage.SealedValue.Empty ) - sealed trait SealedValue extends SemanticdbGeneratedOneof derives CanEqual { + sealed trait SealedValue extends SemanticdbGeneratedOneof derives CanEqual: def isEmpty: _root_.scala.Boolean = false def isDefined: _root_.scala.Boolean = true def isPrivateAccess: _root_.scala.Boolean = false @@ -216,67 +204,57 @@ object AccessMessage extends SemanticdbGeneratedMessageCompanion[dotty.tools.do def protectedThisAccess: _root_.scala.Option[dotty.tools.dotc.semanticdb.ProtectedThisAccess] = _root_.scala.None def protectedWithinAccess: _root_.scala.Option[dotty.tools.dotc.semanticdb.ProtectedWithinAccess] = _root_.scala.None def publicAccess: _root_.scala.Option[dotty.tools.dotc.semanticdb.PublicAccess] = _root_.scala.None - } - object SealedValue { + object SealedValue: @SerialVersionUID(0L) - case object Empty extends dotty.tools.dotc.semanticdb.AccessMessage.SealedValue { + case object Empty extends 
dotty.tools.dotc.semanticdb.AccessMessage.SealedValue: type ValueType = _root_.scala.Nothing override def isEmpty: _root_.scala.Boolean = true override def isDefined: _root_.scala.Boolean = false override def number: _root_.scala.Int = 0 override def value: _root_.scala.Nothing = throw new java.util.NoSuchElementException("Empty.value") - } @SerialVersionUID(0L) - final case class PrivateAccess(value: dotty.tools.dotc.semanticdb.PrivateAccess) extends dotty.tools.dotc.semanticdb.AccessMessage.SealedValue derives CanEqual { + final case class PrivateAccess(value: dotty.tools.dotc.semanticdb.PrivateAccess) extends dotty.tools.dotc.semanticdb.AccessMessage.SealedValue derives CanEqual: type ValueType = dotty.tools.dotc.semanticdb.PrivateAccess override def isPrivateAccess: _root_.scala.Boolean = true override def privateAccess: _root_.scala.Option[dotty.tools.dotc.semanticdb.PrivateAccess] = Some(value) override def number: _root_.scala.Int = 1 - } @SerialVersionUID(0L) - final case class PrivateThisAccess(value: dotty.tools.dotc.semanticdb.PrivateThisAccess) extends dotty.tools.dotc.semanticdb.AccessMessage.SealedValue derives CanEqual { + final case class PrivateThisAccess(value: dotty.tools.dotc.semanticdb.PrivateThisAccess) extends dotty.tools.dotc.semanticdb.AccessMessage.SealedValue derives CanEqual: type ValueType = dotty.tools.dotc.semanticdb.PrivateThisAccess override def isPrivateThisAccess: _root_.scala.Boolean = true override def privateThisAccess: _root_.scala.Option[dotty.tools.dotc.semanticdb.PrivateThisAccess] = Some(value) override def number: _root_.scala.Int = 2 - } @SerialVersionUID(0L) - final case class PrivateWithinAccess(value: dotty.tools.dotc.semanticdb.PrivateWithinAccess) extends dotty.tools.dotc.semanticdb.AccessMessage.SealedValue derives CanEqual { + final case class PrivateWithinAccess(value: dotty.tools.dotc.semanticdb.PrivateWithinAccess) extends dotty.tools.dotc.semanticdb.AccessMessage.SealedValue derives CanEqual: type ValueType = 
dotty.tools.dotc.semanticdb.PrivateWithinAccess override def isPrivateWithinAccess: _root_.scala.Boolean = true override def privateWithinAccess: _root_.scala.Option[dotty.tools.dotc.semanticdb.PrivateWithinAccess] = Some(value) override def number: _root_.scala.Int = 3 - } @SerialVersionUID(0L) - final case class ProtectedAccess(value: dotty.tools.dotc.semanticdb.ProtectedAccess) extends dotty.tools.dotc.semanticdb.AccessMessage.SealedValue derives CanEqual { + final case class ProtectedAccess(value: dotty.tools.dotc.semanticdb.ProtectedAccess) extends dotty.tools.dotc.semanticdb.AccessMessage.SealedValue derives CanEqual: type ValueType = dotty.tools.dotc.semanticdb.ProtectedAccess override def isProtectedAccess: _root_.scala.Boolean = true override def protectedAccess: _root_.scala.Option[dotty.tools.dotc.semanticdb.ProtectedAccess] = Some(value) override def number: _root_.scala.Int = 4 - } @SerialVersionUID(0L) - final case class ProtectedThisAccess(value: dotty.tools.dotc.semanticdb.ProtectedThisAccess) extends dotty.tools.dotc.semanticdb.AccessMessage.SealedValue derives CanEqual { + final case class ProtectedThisAccess(value: dotty.tools.dotc.semanticdb.ProtectedThisAccess) extends dotty.tools.dotc.semanticdb.AccessMessage.SealedValue derives CanEqual: type ValueType = dotty.tools.dotc.semanticdb.ProtectedThisAccess override def isProtectedThisAccess: _root_.scala.Boolean = true override def protectedThisAccess: _root_.scala.Option[dotty.tools.dotc.semanticdb.ProtectedThisAccess] = Some(value) override def number: _root_.scala.Int = 5 - } @SerialVersionUID(0L) - final case class ProtectedWithinAccess(value: dotty.tools.dotc.semanticdb.ProtectedWithinAccess) extends dotty.tools.dotc.semanticdb.AccessMessage.SealedValue derives CanEqual { + final case class ProtectedWithinAccess(value: dotty.tools.dotc.semanticdb.ProtectedWithinAccess) extends dotty.tools.dotc.semanticdb.AccessMessage.SealedValue derives CanEqual: type ValueType = 
dotty.tools.dotc.semanticdb.ProtectedWithinAccess override def isProtectedWithinAccess: _root_.scala.Boolean = true override def protectedWithinAccess: _root_.scala.Option[dotty.tools.dotc.semanticdb.ProtectedWithinAccess] = Some(value) override def number: _root_.scala.Int = 6 - } @SerialVersionUID(0L) - final case class PublicAccess(value: dotty.tools.dotc.semanticdb.PublicAccess) extends dotty.tools.dotc.semanticdb.AccessMessage.SealedValue derives CanEqual { + final case class PublicAccess(value: dotty.tools.dotc.semanticdb.PublicAccess) extends dotty.tools.dotc.semanticdb.AccessMessage.SealedValue derives CanEqual: type ValueType = dotty.tools.dotc.semanticdb.PublicAccess override def isPublicAccess: _root_.scala.Boolean = true override def publicAccess: _root_.scala.Option[dotty.tools.dotc.semanticdb.PublicAccess] = Some(value) override def number: _root_.scala.Int = 7 - } - } final val PRIVATE_ACCESS_FIELD_NUMBER = 1 final val PRIVATE_THIS_ACCESS_FIELD_NUMBER = 2 final val PRIVATE_WITHIN_ACCESS_FIELD_NUMBER = 3 @@ -290,11 +268,10 @@ object AccessMessage extends SemanticdbGeneratedMessageCompanion[dotty.tools.do sealedValue ) // @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.Access]) -} @SerialVersionUID(0L) final case class PrivateAccess( - ) extends dotty.tools.dotc.semanticdb.Access.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { + ) extends dotty.tools.dotc.semanticdb.Access.NonEmpty with SemanticdbGeneratedMessage derives CanEqual: final override def serializedSize: _root_.scala.Int = 0 def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { } @@ -303,22 +280,18 @@ final case class PrivateAccess( // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.PrivateAccess]) -} -object PrivateAccess extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.PrivateAccess] { +object PrivateAccess extends 
SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.PrivateAccess]: implicit def messageCompanion: SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.PrivateAccess] = this - def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.PrivateAccess = { + def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.PrivateAccess = var _done__ = false - while (!_done__) { + while (!_done__) val _tag__ = _input__.readTag() - _tag__ match { + _tag__ match case 0 => _done__ = true case tag => _input__.skipField(tag) - } - } dotty.tools.dotc.semanticdb.PrivateAccess( ) - } @@ -331,11 +304,10 @@ object PrivateAccess extends SemanticdbGeneratedMessageCompanion[dotty.tools.do ): _root_.dotty.tools.dotc.semanticdb.PrivateAccess = _root_.dotty.tools.dotc.semanticdb.PrivateAccess( ) // @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.PrivateAccess]) -} @SerialVersionUID(0L) final case class PrivateThisAccess( - ) extends dotty.tools.dotc.semanticdb.Access.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { + ) extends dotty.tools.dotc.semanticdb.Access.NonEmpty with SemanticdbGeneratedMessage derives CanEqual: final override def serializedSize: _root_.scala.Int = 0 def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { } @@ -344,22 +316,18 @@ final case class PrivateThisAccess( // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.PrivateThisAccess]) -} -object PrivateThisAccess extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.PrivateThisAccess] { +object PrivateThisAccess extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.PrivateThisAccess]: implicit def messageCompanion: SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.PrivateThisAccess] = this - def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.PrivateThisAccess = { + def parseFrom(`_input__`: 
SemanticdbInputStream): dotty.tools.dotc.semanticdb.PrivateThisAccess = var _done__ = false - while (!_done__) { + while (!_done__) val _tag__ = _input__.readTag() - _tag__ match { + _tag__ match case 0 => _done__ = true case tag => _input__.skipField(tag) - } - } dotty.tools.dotc.semanticdb.PrivateThisAccess( ) - } @@ -372,68 +340,57 @@ object PrivateThisAccess extends SemanticdbGeneratedMessageCompanion[dotty.tool ): _root_.dotty.tools.dotc.semanticdb.PrivateThisAccess = _root_.dotty.tools.dotc.semanticdb.PrivateThisAccess( ) // @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.PrivateThisAccess]) -} @SerialVersionUID(0L) final case class PrivateWithinAccess( symbol: _root_.scala.Predef.String = "" - ) extends dotty.tools.dotc.semanticdb.Access.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { + ) extends dotty.tools.dotc.semanticdb.Access.NonEmpty with SemanticdbGeneratedMessage derives CanEqual: @transient @sharable private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private[this] def __computeSerializedSize(): _root_.scala.Int = var __size = 0 { val __value = symbol - if (!__value.isEmpty) { + if (!__value.isEmpty) __size += SemanticdbOutputStream.computeStringSize(1, __value) - } }; __size - } - override def serializedSize: _root_.scala.Int = { + override def serializedSize: _root_.scala.Int = var __size = __serializedSizeMemoized - if (__size == 0) { + if (__size == 0) __size = __computeSerializedSize() + 1 __serializedSizeMemoized = __size - } __size - 1 - } - def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { + def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { val __v = symbol - if (!__v.isEmpty) { + if (!__v.isEmpty) _output__.writeString(1, __v) - } }; - } def withSymbol(__v: _root_.scala.Predef.String): PrivateWithinAccess = copy(symbol = __v) // 
@@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.PrivateWithinAccess]) -} -object PrivateWithinAccess extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.PrivateWithinAccess] { +object PrivateWithinAccess extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.PrivateWithinAccess]: implicit def messageCompanion: SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.PrivateWithinAccess] = this - def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.PrivateWithinAccess = { + def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.PrivateWithinAccess = var __symbol: _root_.scala.Predef.String = "" var _done__ = false - while (!_done__) { + while (!_done__) val _tag__ = _input__.readTag() - _tag__ match { + _tag__ match case 0 => _done__ = true case 10 => __symbol = _input__.readStringRequireUtf8() case tag => _input__.skipField(tag) - } - } dotty.tools.dotc.semanticdb.PrivateWithinAccess( symbol = __symbol ) - } @@ -450,11 +407,10 @@ object PrivateWithinAccess extends SemanticdbGeneratedMessageCompanion[dotty.to symbol ) // @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.PrivateWithinAccess]) -} @SerialVersionUID(0L) final case class ProtectedAccess( - ) extends dotty.tools.dotc.semanticdb.Access.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { + ) extends dotty.tools.dotc.semanticdb.Access.NonEmpty with SemanticdbGeneratedMessage derives CanEqual: final override def serializedSize: _root_.scala.Int = 0 def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { } @@ -463,22 +419,18 @@ final case class ProtectedAccess( // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.ProtectedAccess]) -} -object ProtectedAccess extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.ProtectedAccess] { +object ProtectedAccess extends 
SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.ProtectedAccess]: implicit def messageCompanion: SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.ProtectedAccess] = this - def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.ProtectedAccess = { + def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.ProtectedAccess = var _done__ = false - while (!_done__) { + while (!_done__) val _tag__ = _input__.readTag() - _tag__ match { + _tag__ match case 0 => _done__ = true case tag => _input__.skipField(tag) - } - } dotty.tools.dotc.semanticdb.ProtectedAccess( ) - } @@ -491,11 +443,10 @@ object ProtectedAccess extends SemanticdbGeneratedMessageCompanion[dotty.tools. ): _root_.dotty.tools.dotc.semanticdb.ProtectedAccess = _root_.dotty.tools.dotc.semanticdb.ProtectedAccess( ) // @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.ProtectedAccess]) -} @SerialVersionUID(0L) final case class ProtectedThisAccess( - ) extends dotty.tools.dotc.semanticdb.Access.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { + ) extends dotty.tools.dotc.semanticdb.Access.NonEmpty with SemanticdbGeneratedMessage derives CanEqual: final override def serializedSize: _root_.scala.Int = 0 def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { } @@ -504,22 +455,18 @@ final case class ProtectedThisAccess( // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.ProtectedThisAccess]) -} -object ProtectedThisAccess extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.ProtectedThisAccess] { +object ProtectedThisAccess extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.ProtectedThisAccess]: implicit def messageCompanion: SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.ProtectedThisAccess] = this - def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.ProtectedThisAccess = { + def 
parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.ProtectedThisAccess = var _done__ = false - while (!_done__) { + while (!_done__) val _tag__ = _input__.readTag() - _tag__ match { + _tag__ match case 0 => _done__ = true case tag => _input__.skipField(tag) - } - } dotty.tools.dotc.semanticdb.ProtectedThisAccess( ) - } @@ -532,68 +479,57 @@ object ProtectedThisAccess extends SemanticdbGeneratedMessageCompanion[dotty.to ): _root_.dotty.tools.dotc.semanticdb.ProtectedThisAccess = _root_.dotty.tools.dotc.semanticdb.ProtectedThisAccess( ) // @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.ProtectedThisAccess]) -} @SerialVersionUID(0L) final case class ProtectedWithinAccess( symbol: _root_.scala.Predef.String = "" - ) extends dotty.tools.dotc.semanticdb.Access.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { + ) extends dotty.tools.dotc.semanticdb.Access.NonEmpty with SemanticdbGeneratedMessage derives CanEqual: @transient @sharable private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private[this] def __computeSerializedSize(): _root_.scala.Int = var __size = 0 { val __value = symbol - if (!__value.isEmpty) { + if (!__value.isEmpty) __size += SemanticdbOutputStream.computeStringSize(1, __value) - } }; __size - } - override def serializedSize: _root_.scala.Int = { + override def serializedSize: _root_.scala.Int = var __size = __serializedSizeMemoized - if (__size == 0) { + if (__size == 0) __size = __computeSerializedSize() + 1 __serializedSizeMemoized = __size - } __size - 1 - } - def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { + def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { val __v = symbol - if (!__v.isEmpty) { + if (!__v.isEmpty) _output__.writeString(1, __v) - } }; - } def withSymbol(__v: _root_.scala.Predef.String): ProtectedWithinAccess = copy(symbol = __v) // 
@@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.ProtectedWithinAccess]) -} -object ProtectedWithinAccess extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.ProtectedWithinAccess] { +object ProtectedWithinAccess extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.ProtectedWithinAccess]: implicit def messageCompanion: SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.ProtectedWithinAccess] = this - def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.ProtectedWithinAccess = { + def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.ProtectedWithinAccess = var __symbol: _root_.scala.Predef.String = "" var _done__ = false - while (!_done__) { + while (!_done__) val _tag__ = _input__.readTag() - _tag__ match { + _tag__ match case 0 => _done__ = true case 10 => __symbol = _input__.readStringRequireUtf8() case tag => _input__.skipField(tag) - } - } dotty.tools.dotc.semanticdb.ProtectedWithinAccess( symbol = __symbol ) - } @@ -610,11 +546,10 @@ object ProtectedWithinAccess extends SemanticdbGeneratedMessageCompanion[dotty. 
symbol ) // @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.ProtectedWithinAccess]) -} @SerialVersionUID(0L) final case class PublicAccess( - ) extends dotty.tools.dotc.semanticdb.Access.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { + ) extends dotty.tools.dotc.semanticdb.Access.NonEmpty with SemanticdbGeneratedMessage derives CanEqual: final override def serializedSize: _root_.scala.Int = 0 def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { } @@ -623,22 +558,18 @@ final case class PublicAccess( // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.PublicAccess]) -} -object PublicAccess extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.PublicAccess] { +object PublicAccess extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.PublicAccess]: implicit def messageCompanion: SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.PublicAccess] = this - def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.PublicAccess = { + def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.PublicAccess = var _done__ = false - while (!_done__) { + while (!_done__) val _tag__ = _input__.readTag() - _tag__ match { + _tag__ match case 0 => _done__ = true case tag => _input__.skipField(tag) - } - } dotty.tools.dotc.semanticdb.PublicAccess( ) - } @@ -651,4 +582,3 @@ object PublicAccess extends SemanticdbGeneratedMessageCompanion[dotty.tools.dot ): _root_.dotty.tools.dotc.semanticdb.PublicAccess = _root_.dotty.tools.dotc.semanticdb.PublicAccess( ) // @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.PublicAccess]) -} diff --git a/compiler/src/dotty/tools/dotc/semanticdb/generated/Annotation.scala b/compiler/src/dotty/tools/dotc/semanticdb/generated/Annotation.scala index 2cb478d89e2d..917182ad1ae1 100644 --- a/compiler/src/dotty/tools/dotc/semanticdb/generated/Annotation.scala +++ 
b/compiler/src/dotty/tools/dotc/semanticdb/generated/Annotation.scala @@ -11,65 +11,55 @@ import scala.annotation.internal.sharable @SerialVersionUID(0L) final case class Annotation( tpe: dotty.tools.dotc.semanticdb.Type = dotty.tools.dotc.semanticdb.Annotation._typemapper_tpe.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance) - ) extends SemanticdbGeneratedMessage derives CanEqual { + ) extends SemanticdbGeneratedMessage derives CanEqual: @transient @sharable private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private[this] def __computeSerializedSize(): _root_.scala.Int = var __size = 0 { val __value = dotty.tools.dotc.semanticdb.Annotation._typemapper_tpe.toBase(tpe) - if (__value.serializedSize != 0) { + if (__value.serializedSize != 0) __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize - } }; __size - } - override def serializedSize: _root_.scala.Int = { + override def serializedSize: _root_.scala.Int = var __size = __serializedSizeMemoized - if (__size == 0) { + if (__size == 0) __size = __computeSerializedSize() + 1 __serializedSizeMemoized = __size - } __size - 1 - } - def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { + def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { val __v = dotty.tools.dotc.semanticdb.Annotation._typemapper_tpe.toBase(tpe) - if (__v.serializedSize != 0) { + if (__v.serializedSize != 0) _output__.writeTag(1, 2) _output__.writeUInt32NoTag(__v.serializedSize) __v.writeTo(_output__) - } }; - } def withTpe(__v: dotty.tools.dotc.semanticdb.Type): Annotation = copy(tpe = __v) // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.Annotation]) -} -object Annotation extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.Annotation] { +object Annotation extends 
SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.Annotation]: implicit def messageCompanion: SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.Annotation] = this - def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.Annotation = { + def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.Annotation = var __tpe: _root_.scala.Option[dotty.tools.dotc.semanticdb.TypeMessage] = _root_.scala.None var _done__ = false - while (!_done__) { + while (!_done__) val _tag__ = _input__.readTag() - _tag__ match { + _tag__ match case 0 => _done__ = true case 10 => __tpe = _root_.scala.Some(__tpe.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.TypeMessage](_input__))(LiteParser.readMessage(_input__, _))) case tag => _input__.skipField(tag) - } - } dotty.tools.dotc.semanticdb.Annotation( tpe = dotty.tools.dotc.semanticdb.Annotation._typemapper_tpe.toCustom(__tpe.getOrElse(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance)) ) - } @@ -88,4 +78,3 @@ object Annotation extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc. 
tpe ) // @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.Annotation]) -} diff --git a/compiler/src/dotty/tools/dotc/semanticdb/generated/Constant.scala b/compiler/src/dotty/tools/dotc/semanticdb/generated/Constant.scala index 0ca96d9ae8c6..f13485353b98 100644 --- a/compiler/src/dotty/tools/dotc/semanticdb/generated/Constant.scala +++ b/compiler/src/dotty/tools/dotc/semanticdb/generated/Constant.scala @@ -8,22 +8,21 @@ package dotty.tools.dotc.semanticdb import dotty.tools.dotc.semanticdb.internal._ import scala.annotation.internal.sharable -sealed trait Constant extends SemanticdbGeneratedSealedOneof derives CanEqual { +sealed trait Constant extends SemanticdbGeneratedSealedOneof derives CanEqual: type MessageType = dotty.tools.dotc.semanticdb.ConstantMessage final def isEmpty = this.isInstanceOf[dotty.tools.dotc.semanticdb.Constant.Empty.type] final def isDefined = !isEmpty final def asMessage: dotty.tools.dotc.semanticdb.ConstantMessage = dotty.tools.dotc.semanticdb.Constant.ConstantTypeMapper.toBase(this) final def asNonEmpty: Option[dotty.tools.dotc.semanticdb.Constant.NonEmpty] = if (isEmpty) None else Some(this.asInstanceOf[dotty.tools.dotc.semanticdb.Constant.NonEmpty]) -} -object Constant { +object Constant: case object Empty extends dotty.tools.dotc.semanticdb.Constant sealed trait NonEmpty extends dotty.tools.dotc.semanticdb.Constant def defaultInstance: dotty.tools.dotc.semanticdb.Constant = Empty - implicit val ConstantTypeMapper: SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.ConstantMessage, dotty.tools.dotc.semanticdb.Constant] = new SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.ConstantMessage, dotty.tools.dotc.semanticdb.Constant] { - override def toCustom(__base: dotty.tools.dotc.semanticdb.ConstantMessage): dotty.tools.dotc.semanticdb.Constant = __base.sealedValue match { + implicit val ConstantTypeMapper: SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.ConstantMessage, dotty.tools.dotc.semanticdb.Constant] = 
new SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.ConstantMessage, dotty.tools.dotc.semanticdb.Constant]: + override def toCustom(__base: dotty.tools.dotc.semanticdb.ConstantMessage): dotty.tools.dotc.semanticdb.Constant = __base.sealedValue match case __v: dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue.UnitConstant => __v.value case __v: dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue.BooleanConstant => __v.value case __v: dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue.ByteConstant => __v.value @@ -36,7 +35,6 @@ object Constant { case __v: dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue.StringConstant => __v.value case __v: dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue.NullConstant => __v.value case dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue.Empty => Empty - } override def toBase(__custom: dotty.tools.dotc.semanticdb.Constant): dotty.tools.dotc.semanticdb.ConstantMessage = dotty.tools.dotc.semanticdb.ConstantMessage(__custom match { case __v: dotty.tools.dotc.semanticdb.UnitConstant => dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue.UnitConstant(__v) case __v: dotty.tools.dotc.semanticdb.BooleanConstant => dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue.BooleanConstant(__v) @@ -51,15 +49,13 @@ object Constant { case __v: dotty.tools.dotc.semanticdb.NullConstant => dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue.NullConstant(__v) case Empty => dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue.Empty }) - } -} @SerialVersionUID(0L) final case class ConstantMessage( sealedValue: dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue = dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue.Empty - ) extends SemanticdbGeneratedMessage derives CanEqual { + ) extends SemanticdbGeneratedMessage derives CanEqual: @transient @sharable private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private[this] def 
__computeSerializedSize(): _root_.scala.Int = var __size = 0 if (sealedValue.unitConstant.isDefined) { val __value = sealedValue.unitConstant.get @@ -106,17 +102,14 @@ final case class ConstantMessage( __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize }; __size - } - override def serializedSize: _root_.scala.Int = { + override def serializedSize: _root_.scala.Int = var __size = __serializedSizeMemoized - if (__size == 0) { + if (__size == 0) __size = __computeSerializedSize() + 1 __serializedSizeMemoized = __size - } __size - 1 - } - def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { + def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = sealedValue.unitConstant.foreach { __v => val __m = __v _output__.writeTag(1, 2) @@ -183,7 +176,6 @@ final case class ConstantMessage( _output__.writeUInt32NoTag(__m.serializedSize) __m.writeTo(_output__) }; - } def getUnitConstant: dotty.tools.dotc.semanticdb.UnitConstant = sealedValue.unitConstant.getOrElse(dotty.tools.dotc.semanticdb.UnitConstant.defaultInstance) def withUnitConstant(__v: dotty.tools.dotc.semanticdb.UnitConstant): ConstantMessage = copy(sealedValue = dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue.UnitConstant(__v)) def getBooleanConstant: dotty.tools.dotc.semanticdb.BooleanConstant = sealedValue.booleanConstant.getOrElse(dotty.tools.dotc.semanticdb.BooleanConstant.defaultInstance) @@ -214,16 +206,15 @@ final case class ConstantMessage( def toConstant: dotty.tools.dotc.semanticdb.Constant = dotty.tools.dotc.semanticdb.Constant.ConstantTypeMapper.toCustom(this) // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.Constant]) -} -object ConstantMessage extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.ConstantMessage] { +object ConstantMessage extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.ConstantMessage]: implicit def messageCompanion: 
SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.ConstantMessage] = this - def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.ConstantMessage = { + def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.ConstantMessage = var __sealedValue: dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue = dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue.Empty var _done__ = false - while (!_done__) { + while (!_done__) val _tag__ = _input__.readTag() - _tag__ match { + _tag__ match case 0 => _done__ = true case 10 => __sealedValue = dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue.UnitConstant(__sealedValue.unitConstant.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.UnitConstant](_input__))(LiteParser.readMessage(_input__, _))) @@ -248,12 +239,9 @@ object ConstantMessage extends SemanticdbGeneratedMessageCompanion[dotty.tools. case 90 => __sealedValue = dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue.NullConstant(__sealedValue.nullConstant.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.NullConstant](_input__))(LiteParser.readMessage(_input__, _))) case tag => _input__.skipField(tag) - } - } dotty.tools.dotc.semanticdb.ConstantMessage( sealedValue = __sealedValue ) - } @@ -263,7 +251,7 @@ object ConstantMessage extends SemanticdbGeneratedMessageCompanion[dotty.tools. lazy val defaultInstance = dotty.tools.dotc.semanticdb.ConstantMessage( sealedValue = dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue.Empty ) - sealed trait SealedValue extends SemanticdbGeneratedOneof derives CanEqual { + sealed trait SealedValue extends SemanticdbGeneratedOneof derives CanEqual: def isEmpty: _root_.scala.Boolean = false def isDefined: _root_.scala.Boolean = true def isUnitConstant: _root_.scala.Boolean = false @@ -288,95 +276,81 @@ object ConstantMessage extends SemanticdbGeneratedMessageCompanion[dotty.tools. 
def doubleConstant: _root_.scala.Option[dotty.tools.dotc.semanticdb.DoubleConstant] = _root_.scala.None def stringConstant: _root_.scala.Option[dotty.tools.dotc.semanticdb.StringConstant] = _root_.scala.None def nullConstant: _root_.scala.Option[dotty.tools.dotc.semanticdb.NullConstant] = _root_.scala.None - } - object SealedValue { + object SealedValue: @SerialVersionUID(0L) - case object Empty extends dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue { + case object Empty extends dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue: type ValueType = _root_.scala.Nothing override def isEmpty: _root_.scala.Boolean = true override def isDefined: _root_.scala.Boolean = false override def number: _root_.scala.Int = 0 override def value: _root_.scala.Nothing = throw new java.util.NoSuchElementException("Empty.value") - } @SerialVersionUID(0L) - final case class UnitConstant(value: dotty.tools.dotc.semanticdb.UnitConstant) extends dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue derives CanEqual { + final case class UnitConstant(value: dotty.tools.dotc.semanticdb.UnitConstant) extends dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue derives CanEqual: type ValueType = dotty.tools.dotc.semanticdb.UnitConstant override def isUnitConstant: _root_.scala.Boolean = true override def unitConstant: _root_.scala.Option[dotty.tools.dotc.semanticdb.UnitConstant] = Some(value) override def number: _root_.scala.Int = 1 - } @SerialVersionUID(0L) - final case class BooleanConstant(value: dotty.tools.dotc.semanticdb.BooleanConstant) extends dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue derives CanEqual { + final case class BooleanConstant(value: dotty.tools.dotc.semanticdb.BooleanConstant) extends dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue derives CanEqual: type ValueType = dotty.tools.dotc.semanticdb.BooleanConstant override def isBooleanConstant: _root_.scala.Boolean = true override def booleanConstant: 
_root_.scala.Option[dotty.tools.dotc.semanticdb.BooleanConstant] = Some(value) override def number: _root_.scala.Int = 2 - } @SerialVersionUID(0L) - final case class ByteConstant(value: dotty.tools.dotc.semanticdb.ByteConstant) extends dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue derives CanEqual { + final case class ByteConstant(value: dotty.tools.dotc.semanticdb.ByteConstant) extends dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue derives CanEqual: type ValueType = dotty.tools.dotc.semanticdb.ByteConstant override def isByteConstant: _root_.scala.Boolean = true override def byteConstant: _root_.scala.Option[dotty.tools.dotc.semanticdb.ByteConstant] = Some(value) override def number: _root_.scala.Int = 3 - } @SerialVersionUID(0L) - final case class ShortConstant(value: dotty.tools.dotc.semanticdb.ShortConstant) extends dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue derives CanEqual { + final case class ShortConstant(value: dotty.tools.dotc.semanticdb.ShortConstant) extends dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue derives CanEqual: type ValueType = dotty.tools.dotc.semanticdb.ShortConstant override def isShortConstant: _root_.scala.Boolean = true override def shortConstant: _root_.scala.Option[dotty.tools.dotc.semanticdb.ShortConstant] = Some(value) override def number: _root_.scala.Int = 4 - } @SerialVersionUID(0L) - final case class CharConstant(value: dotty.tools.dotc.semanticdb.CharConstant) extends dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue derives CanEqual { + final case class CharConstant(value: dotty.tools.dotc.semanticdb.CharConstant) extends dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue derives CanEqual: type ValueType = dotty.tools.dotc.semanticdb.CharConstant override def isCharConstant: _root_.scala.Boolean = true override def charConstant: _root_.scala.Option[dotty.tools.dotc.semanticdb.CharConstant] = Some(value) override def number: _root_.scala.Int = 5 - } @SerialVersionUID(0L) - 
final case class IntConstant(value: dotty.tools.dotc.semanticdb.IntConstant) extends dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue derives CanEqual { + final case class IntConstant(value: dotty.tools.dotc.semanticdb.IntConstant) extends dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue derives CanEqual: type ValueType = dotty.tools.dotc.semanticdb.IntConstant override def isIntConstant: _root_.scala.Boolean = true override def intConstant: _root_.scala.Option[dotty.tools.dotc.semanticdb.IntConstant] = Some(value) override def number: _root_.scala.Int = 6 - } @SerialVersionUID(0L) - final case class LongConstant(value: dotty.tools.dotc.semanticdb.LongConstant) extends dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue derives CanEqual { + final case class LongConstant(value: dotty.tools.dotc.semanticdb.LongConstant) extends dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue derives CanEqual: type ValueType = dotty.tools.dotc.semanticdb.LongConstant override def isLongConstant: _root_.scala.Boolean = true override def longConstant: _root_.scala.Option[dotty.tools.dotc.semanticdb.LongConstant] = Some(value) override def number: _root_.scala.Int = 7 - } @SerialVersionUID(0L) - final case class FloatConstant(value: dotty.tools.dotc.semanticdb.FloatConstant) extends dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue derives CanEqual { + final case class FloatConstant(value: dotty.tools.dotc.semanticdb.FloatConstant) extends dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue derives CanEqual: type ValueType = dotty.tools.dotc.semanticdb.FloatConstant override def isFloatConstant: _root_.scala.Boolean = true override def floatConstant: _root_.scala.Option[dotty.tools.dotc.semanticdb.FloatConstant] = Some(value) override def number: _root_.scala.Int = 8 - } @SerialVersionUID(0L) - final case class DoubleConstant(value: dotty.tools.dotc.semanticdb.DoubleConstant) extends dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue derives 
CanEqual { + final case class DoubleConstant(value: dotty.tools.dotc.semanticdb.DoubleConstant) extends dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue derives CanEqual: type ValueType = dotty.tools.dotc.semanticdb.DoubleConstant override def isDoubleConstant: _root_.scala.Boolean = true override def doubleConstant: _root_.scala.Option[dotty.tools.dotc.semanticdb.DoubleConstant] = Some(value) override def number: _root_.scala.Int = 9 - } @SerialVersionUID(0L) - final case class StringConstant(value: dotty.tools.dotc.semanticdb.StringConstant) extends dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue derives CanEqual { + final case class StringConstant(value: dotty.tools.dotc.semanticdb.StringConstant) extends dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue derives CanEqual: type ValueType = dotty.tools.dotc.semanticdb.StringConstant override def isStringConstant: _root_.scala.Boolean = true override def stringConstant: _root_.scala.Option[dotty.tools.dotc.semanticdb.StringConstant] = Some(value) override def number: _root_.scala.Int = 10 - } @SerialVersionUID(0L) - final case class NullConstant(value: dotty.tools.dotc.semanticdb.NullConstant) extends dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue derives CanEqual { + final case class NullConstant(value: dotty.tools.dotc.semanticdb.NullConstant) extends dotty.tools.dotc.semanticdb.ConstantMessage.SealedValue derives CanEqual: type ValueType = dotty.tools.dotc.semanticdb.NullConstant override def isNullConstant: _root_.scala.Boolean = true override def nullConstant: _root_.scala.Option[dotty.tools.dotc.semanticdb.NullConstant] = Some(value) override def number: _root_.scala.Int = 11 - } - } final val UNIT_CONSTANT_FIELD_NUMBER = 1 final val BOOLEAN_CONSTANT_FIELD_NUMBER = 2 final val BYTE_CONSTANT_FIELD_NUMBER = 3 @@ -394,11 +368,10 @@ object ConstantMessage extends SemanticdbGeneratedMessageCompanion[dotty.tools. 
sealedValue ) // @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.Constant]) -} @SerialVersionUID(0L) final case class UnitConstant( - ) extends dotty.tools.dotc.semanticdb.Constant.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { + ) extends dotty.tools.dotc.semanticdb.Constant.NonEmpty with SemanticdbGeneratedMessage derives CanEqual: final override def serializedSize: _root_.scala.Int = 0 def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { } @@ -407,22 +380,18 @@ final case class UnitConstant( // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.UnitConstant]) -} -object UnitConstant extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.UnitConstant] { +object UnitConstant extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.UnitConstant]: implicit def messageCompanion: SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.UnitConstant] = this - def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.UnitConstant = { + def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.UnitConstant = var _done__ = false - while (!_done__) { + while (!_done__) val _tag__ = _input__.readTag() - _tag__ match { + _tag__ match case 0 => _done__ = true case tag => _input__.skipField(tag) - } - } dotty.tools.dotc.semanticdb.UnitConstant( ) - } @@ -435,68 +404,57 @@ object UnitConstant extends SemanticdbGeneratedMessageCompanion[dotty.tools.dot ): _root_.dotty.tools.dotc.semanticdb.UnitConstant = _root_.dotty.tools.dotc.semanticdb.UnitConstant( ) // @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.UnitConstant]) -} @SerialVersionUID(0L) final case class BooleanConstant( value: _root_.scala.Boolean = false - ) extends dotty.tools.dotc.semanticdb.Constant.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { + ) extends dotty.tools.dotc.semanticdb.Constant.NonEmpty with 
SemanticdbGeneratedMessage derives CanEqual: @transient @sharable private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private[this] def __computeSerializedSize(): _root_.scala.Int = var __size = 0 { val __value = value - if (__value != false) { + if (__value != false) __size += SemanticdbOutputStream.computeBoolSize(1, __value) - } }; __size - } - override def serializedSize: _root_.scala.Int = { + override def serializedSize: _root_.scala.Int = var __size = __serializedSizeMemoized - if (__size == 0) { + if (__size == 0) __size = __computeSerializedSize() + 1 __serializedSizeMemoized = __size - } __size - 1 - } - def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { + def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { val __v = value - if (__v != false) { + if (__v != false) _output__.writeBool(1, __v) - } }; - } def withValue(__v: _root_.scala.Boolean): BooleanConstant = copy(value = __v) // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.BooleanConstant]) -} -object BooleanConstant extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.BooleanConstant] { +object BooleanConstant extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.BooleanConstant]: implicit def messageCompanion: SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.BooleanConstant] = this - def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.BooleanConstant = { + def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.BooleanConstant = var __value: _root_.scala.Boolean = false var _done__ = false - while (!_done__) { + while (!_done__) val _tag__ = _input__.readTag() - _tag__ match { + _tag__ match case 0 => _done__ = true case 8 => __value = _input__.readBool() case tag => _input__.skipField(tag) - } - } dotty.tools.dotc.semanticdb.BooleanConstant( value = 
__value ) - } @@ -513,68 +471,57 @@ object BooleanConstant extends SemanticdbGeneratedMessageCompanion[dotty.tools. value ) // @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.BooleanConstant]) -} @SerialVersionUID(0L) final case class ByteConstant( value: _root_.scala.Int = 0 - ) extends dotty.tools.dotc.semanticdb.Constant.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { + ) extends dotty.tools.dotc.semanticdb.Constant.NonEmpty with SemanticdbGeneratedMessage derives CanEqual: @transient @sharable private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private[this] def __computeSerializedSize(): _root_.scala.Int = var __size = 0 { val __value = value - if (__value != 0) { + if (__value != 0) __size += SemanticdbOutputStream.computeInt32Size(1, __value) - } }; __size - } - override def serializedSize: _root_.scala.Int = { + override def serializedSize: _root_.scala.Int = var __size = __serializedSizeMemoized - if (__size == 0) { + if (__size == 0) __size = __computeSerializedSize() + 1 __serializedSizeMemoized = __size - } __size - 1 - } - def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { + def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { val __v = value - if (__v != 0) { + if (__v != 0) _output__.writeInt32(1, __v) - } }; - } def withValue(__v: _root_.scala.Int): ByteConstant = copy(value = __v) // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.ByteConstant]) -} -object ByteConstant extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.ByteConstant] { +object ByteConstant extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.ByteConstant]: implicit def messageCompanion: SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.ByteConstant] = this - def parseFrom(`_input__`: SemanticdbInputStream): 
dotty.tools.dotc.semanticdb.ByteConstant = { + def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.ByteConstant = var __value: _root_.scala.Int = 0 var _done__ = false - while (!_done__) { + while (!_done__) val _tag__ = _input__.readTag() - _tag__ match { + _tag__ match case 0 => _done__ = true case 8 => __value = _input__.readInt32() case tag => _input__.skipField(tag) - } - } dotty.tools.dotc.semanticdb.ByteConstant( value = __value ) - } @@ -591,68 +538,57 @@ object ByteConstant extends SemanticdbGeneratedMessageCompanion[dotty.tools.dot value ) // @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.ByteConstant]) -} @SerialVersionUID(0L) final case class ShortConstant( value: _root_.scala.Int = 0 - ) extends dotty.tools.dotc.semanticdb.Constant.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { + ) extends dotty.tools.dotc.semanticdb.Constant.NonEmpty with SemanticdbGeneratedMessage derives CanEqual: @transient @sharable private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private[this] def __computeSerializedSize(): _root_.scala.Int = var __size = 0 { val __value = value - if (__value != 0) { + if (__value != 0) __size += SemanticdbOutputStream.computeInt32Size(1, __value) - } }; __size - } - override def serializedSize: _root_.scala.Int = { + override def serializedSize: _root_.scala.Int = var __size = __serializedSizeMemoized - if (__size == 0) { + if (__size == 0) __size = __computeSerializedSize() + 1 __serializedSizeMemoized = __size - } __size - 1 - } - def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { + def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { val __v = value - if (__v != 0) { + if (__v != 0) _output__.writeInt32(1, __v) - } }; - } def withValue(__v: _root_.scala.Int): ShortConstant = copy(value = __v) // 
@@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.ShortConstant]) -} -object ShortConstant extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.ShortConstant] { +object ShortConstant extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.ShortConstant]: implicit def messageCompanion: SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.ShortConstant] = this - def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.ShortConstant = { + def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.ShortConstant = var __value: _root_.scala.Int = 0 var _done__ = false - while (!_done__) { + while (!_done__) val _tag__ = _input__.readTag() - _tag__ match { + _tag__ match case 0 => _done__ = true case 8 => __value = _input__.readInt32() case tag => _input__.skipField(tag) - } - } dotty.tools.dotc.semanticdb.ShortConstant( value = __value ) - } @@ -669,68 +605,57 @@ object ShortConstant extends SemanticdbGeneratedMessageCompanion[dotty.tools.do value ) // @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.ShortConstant]) -} @SerialVersionUID(0L) final case class CharConstant( value: _root_.scala.Int = 0 - ) extends dotty.tools.dotc.semanticdb.Constant.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { + ) extends dotty.tools.dotc.semanticdb.Constant.NonEmpty with SemanticdbGeneratedMessage derives CanEqual: @transient @sharable private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private[this] def __computeSerializedSize(): _root_.scala.Int = var __size = 0 { val __value = value - if (__value != 0) { + if (__value != 0) __size += SemanticdbOutputStream.computeInt32Size(1, __value) - } }; __size - } - override def serializedSize: _root_.scala.Int = { + override def serializedSize: _root_.scala.Int = var __size = __serializedSizeMemoized - if (__size == 
0) { + if (__size == 0) __size = __computeSerializedSize() + 1 __serializedSizeMemoized = __size - } __size - 1 - } - def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { + def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { val __v = value - if (__v != 0) { + if (__v != 0) _output__.writeInt32(1, __v) - } }; - } def withValue(__v: _root_.scala.Int): CharConstant = copy(value = __v) // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.CharConstant]) -} -object CharConstant extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.CharConstant] { +object CharConstant extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.CharConstant]: implicit def messageCompanion: SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.CharConstant] = this - def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.CharConstant = { + def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.CharConstant = var __value: _root_.scala.Int = 0 var _done__ = false - while (!_done__) { + while (!_done__) val _tag__ = _input__.readTag() - _tag__ match { + _tag__ match case 0 => _done__ = true case 8 => __value = _input__.readInt32() case tag => _input__.skipField(tag) - } - } dotty.tools.dotc.semanticdb.CharConstant( value = __value ) - } @@ -747,68 +672,57 @@ object CharConstant extends SemanticdbGeneratedMessageCompanion[dotty.tools.dot value ) // @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.CharConstant]) -} @SerialVersionUID(0L) final case class IntConstant( value: _root_.scala.Int = 0 - ) extends dotty.tools.dotc.semanticdb.Constant.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { + ) extends dotty.tools.dotc.semanticdb.Constant.NonEmpty with SemanticdbGeneratedMessage derives CanEqual: @transient @sharable private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def 
__computeSerializedSize(): _root_.scala.Int = { + private[this] def __computeSerializedSize(): _root_.scala.Int = var __size = 0 { val __value = value - if (__value != 0) { + if (__value != 0) __size += SemanticdbOutputStream.computeInt32Size(1, __value) - } }; __size - } - override def serializedSize: _root_.scala.Int = { + override def serializedSize: _root_.scala.Int = var __size = __serializedSizeMemoized - if (__size == 0) { + if (__size == 0) __size = __computeSerializedSize() + 1 __serializedSizeMemoized = __size - } __size - 1 - } - def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { + def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { val __v = value - if (__v != 0) { + if (__v != 0) _output__.writeInt32(1, __v) - } }; - } def withValue(__v: _root_.scala.Int): IntConstant = copy(value = __v) // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.IntConstant]) -} -object IntConstant extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.IntConstant] { +object IntConstant extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.IntConstant]: implicit def messageCompanion: SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.IntConstant] = this - def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.IntConstant = { + def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.IntConstant = var __value: _root_.scala.Int = 0 var _done__ = false - while (!_done__) { + while (!_done__) val _tag__ = _input__.readTag() - _tag__ match { + _tag__ match case 0 => _done__ = true case 8 => __value = _input__.readInt32() case tag => _input__.skipField(tag) - } - } dotty.tools.dotc.semanticdb.IntConstant( value = __value ) - } @@ -825,68 +739,57 @@ object IntConstant extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc value ) // @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.IntConstant]) 
-} @SerialVersionUID(0L) final case class LongConstant( value: _root_.scala.Long = 0L - ) extends dotty.tools.dotc.semanticdb.Constant.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { + ) extends dotty.tools.dotc.semanticdb.Constant.NonEmpty with SemanticdbGeneratedMessage derives CanEqual: @transient @sharable private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private[this] def __computeSerializedSize(): _root_.scala.Int = var __size = 0 { val __value = value - if (__value != 0L) { + if (__value != 0L) __size += SemanticdbOutputStream.computeInt64Size(1, __value) - } }; __size - } - override def serializedSize: _root_.scala.Int = { + override def serializedSize: _root_.scala.Int = var __size = __serializedSizeMemoized - if (__size == 0) { + if (__size == 0) __size = __computeSerializedSize() + 1 __serializedSizeMemoized = __size - } __size - 1 - } - def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { + def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { val __v = value - if (__v != 0L) { + if (__v != 0L) _output__.writeInt64(1, __v) - } }; - } def withValue(__v: _root_.scala.Long): LongConstant = copy(value = __v) // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.LongConstant]) -} -object LongConstant extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.LongConstant] { +object LongConstant extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.LongConstant]: implicit def messageCompanion: SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.LongConstant] = this - def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.LongConstant = { + def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.LongConstant = var __value: _root_.scala.Long = 0L var _done__ = false - while (!_done__) { + while (!_done__) val _tag__ = 
_input__.readTag() - _tag__ match { + _tag__ match case 0 => _done__ = true case 8 => __value = _input__.readInt64() case tag => _input__.skipField(tag) - } - } dotty.tools.dotc.semanticdb.LongConstant( value = __value ) - } @@ -903,68 +806,57 @@ object LongConstant extends SemanticdbGeneratedMessageCompanion[dotty.tools.dot value ) // @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.LongConstant]) -} @SerialVersionUID(0L) final case class FloatConstant( value: _root_.scala.Float = 0.0f - ) extends dotty.tools.dotc.semanticdb.Constant.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { + ) extends dotty.tools.dotc.semanticdb.Constant.NonEmpty with SemanticdbGeneratedMessage derives CanEqual: @transient @sharable private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private[this] def __computeSerializedSize(): _root_.scala.Int = var __size = 0 { val __value = value - if (__value != 0.0f) { + if (__value != 0.0f) __size += SemanticdbOutputStream.computeFloatSize(1, __value) - } }; __size - } - override def serializedSize: _root_.scala.Int = { + override def serializedSize: _root_.scala.Int = var __size = __serializedSizeMemoized - if (__size == 0) { + if (__size == 0) __size = __computeSerializedSize() + 1 __serializedSizeMemoized = __size - } __size - 1 - } - def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { + def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { val __v = value - if (__v != 0.0f) { + if (__v != 0.0f) _output__.writeFloat(1, __v) - } }; - } def withValue(__v: _root_.scala.Float): FloatConstant = copy(value = __v) // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.FloatConstant]) -} -object FloatConstant extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.FloatConstant] { +object FloatConstant extends 
SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.FloatConstant]: implicit def messageCompanion: SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.FloatConstant] = this - def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.FloatConstant = { + def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.FloatConstant = var __value: _root_.scala.Float = 0.0f var _done__ = false - while (!_done__) { + while (!_done__) val _tag__ = _input__.readTag() - _tag__ match { + _tag__ match case 0 => _done__ = true case 13 => __value = _input__.readFloat() case tag => _input__.skipField(tag) - } - } dotty.tools.dotc.semanticdb.FloatConstant( value = __value ) - } @@ -981,68 +873,57 @@ object FloatConstant extends SemanticdbGeneratedMessageCompanion[dotty.tools.do value ) // @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.FloatConstant]) -} @SerialVersionUID(0L) final case class DoubleConstant( value: _root_.scala.Double = 0.0 - ) extends dotty.tools.dotc.semanticdb.Constant.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { + ) extends dotty.tools.dotc.semanticdb.Constant.NonEmpty with SemanticdbGeneratedMessage derives CanEqual: @transient @sharable private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private[this] def __computeSerializedSize(): _root_.scala.Int = var __size = 0 { val __value = value - if (__value != 0.0) { + if (__value != 0.0) __size += SemanticdbOutputStream.computeDoubleSize(1, __value) - } }; __size - } - override def serializedSize: _root_.scala.Int = { + override def serializedSize: _root_.scala.Int = var __size = __serializedSizeMemoized - if (__size == 0) { + if (__size == 0) __size = __computeSerializedSize() + 1 __serializedSizeMemoized = __size - } __size - 1 - } - def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { + def 
writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { val __v = value - if (__v != 0.0) { + if (__v != 0.0) _output__.writeDouble(1, __v) - } }; - } def withValue(__v: _root_.scala.Double): DoubleConstant = copy(value = __v) // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.DoubleConstant]) -} -object DoubleConstant extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.DoubleConstant] { +object DoubleConstant extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.DoubleConstant]: implicit def messageCompanion: SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.DoubleConstant] = this - def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.DoubleConstant = { + def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.DoubleConstant = var __value: _root_.scala.Double = 0.0 var _done__ = false - while (!_done__) { + while (!_done__) val _tag__ = _input__.readTag() - _tag__ match { + _tag__ match case 0 => _done__ = true case 9 => __value = _input__.readDouble() case tag => _input__.skipField(tag) - } - } dotty.tools.dotc.semanticdb.DoubleConstant( value = __value ) - } @@ -1059,68 +940,57 @@ object DoubleConstant extends SemanticdbGeneratedMessageCompanion[dotty.tools.d value ) // @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.DoubleConstant]) -} @SerialVersionUID(0L) final case class StringConstant( value: _root_.scala.Predef.String = "" - ) extends dotty.tools.dotc.semanticdb.Constant.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { + ) extends dotty.tools.dotc.semanticdb.Constant.NonEmpty with SemanticdbGeneratedMessage derives CanEqual: @transient @sharable private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private[this] def __computeSerializedSize(): _root_.scala.Int = var __size = 0 { val __value = value - if 
(!__value.isEmpty) { + if (!__value.isEmpty) __size += SemanticdbOutputStream.computeStringSize(1, __value) - } }; __size - } - override def serializedSize: _root_.scala.Int = { + override def serializedSize: _root_.scala.Int = var __size = __serializedSizeMemoized - if (__size == 0) { + if (__size == 0) __size = __computeSerializedSize() + 1 __serializedSizeMemoized = __size - } __size - 1 - } - def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { + def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { val __v = value - if (!__v.isEmpty) { + if (!__v.isEmpty) _output__.writeString(1, __v) - } }; - } def withValue(__v: _root_.scala.Predef.String): StringConstant = copy(value = __v) // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.StringConstant]) -} -object StringConstant extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.StringConstant] { +object StringConstant extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.StringConstant]: implicit def messageCompanion: SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.StringConstant] = this - def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.StringConstant = { + def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.StringConstant = var __value: _root_.scala.Predef.String = "" var _done__ = false - while (!_done__) { + while (!_done__) val _tag__ = _input__.readTag() - _tag__ match { + _tag__ match case 0 => _done__ = true case 10 => __value = _input__.readStringRequireUtf8() case tag => _input__.skipField(tag) - } - } dotty.tools.dotc.semanticdb.StringConstant( value = __value ) - } @@ -1137,11 +1007,10 @@ object StringConstant extends SemanticdbGeneratedMessageCompanion[dotty.tools.d value ) // @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.StringConstant]) -} @SerialVersionUID(0L) final case class NullConstant( - ) extends 
dotty.tools.dotc.semanticdb.Constant.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { + ) extends dotty.tools.dotc.semanticdb.Constant.NonEmpty with SemanticdbGeneratedMessage derives CanEqual: final override def serializedSize: _root_.scala.Int = 0 def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { } @@ -1150,22 +1019,18 @@ final case class NullConstant( // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.NullConstant]) -} -object NullConstant extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.NullConstant] { +object NullConstant extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.NullConstant]: implicit def messageCompanion: SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.NullConstant] = this - def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.NullConstant = { + def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.NullConstant = var _done__ = false - while (!_done__) { + while (!_done__) val _tag__ = _input__.readTag() - _tag__ match { + _tag__ match case 0 => _done__ = true case tag => _input__.skipField(tag) - } - } dotty.tools.dotc.semanticdb.NullConstant( ) - } @@ -1178,4 +1043,3 @@ object NullConstant extends SemanticdbGeneratedMessageCompanion[dotty.tools.dot ): _root_.dotty.tools.dotc.semanticdb.NullConstant = _root_.dotty.tools.dotc.semanticdb.NullConstant( ) // @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.NullConstant]) -} diff --git a/compiler/src/dotty/tools/dotc/semanticdb/generated/Diagnostic.scala b/compiler/src/dotty/tools/dotc/semanticdb/generated/Diagnostic.scala index cc8aa82bf8ea..a058f792f5d6 100644 --- a/compiler/src/dotty/tools/dotc/semanticdb/generated/Diagnostic.scala +++ b/compiler/src/dotty/tools/dotc/semanticdb/generated/Diagnostic.scala @@ -13,10 +13,10 @@ final case class Diagnostic( range: 
_root_.scala.Option[dotty.tools.dotc.semanticdb.Range] = _root_.scala.None, severity: dotty.tools.dotc.semanticdb.Diagnostic.Severity = dotty.tools.dotc.semanticdb.Diagnostic.Severity.UNKNOWN_SEVERITY, message: _root_.scala.Predef.String = "" - ) extends SemanticdbGeneratedMessage derives CanEqual { + ) extends SemanticdbGeneratedMessage derives CanEqual: @transient @sharable private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private[this] def __computeSerializedSize(): _root_.scala.Int = var __size = 0 if (range.isDefined) { val __value = range.get @@ -25,29 +25,24 @@ final case class Diagnostic( { val __value = severity.value - if (__value != 0) { + if (__value != 0) __size += SemanticdbOutputStream.computeEnumSize(2, __value) - } }; { val __value = message - if (!__value.isEmpty) { + if (!__value.isEmpty) __size += SemanticdbOutputStream.computeStringSize(3, __value) - } }; __size - } - override def serializedSize: _root_.scala.Int = { + override def serializedSize: _root_.scala.Int = var __size = __serializedSizeMemoized - if (__size == 0) { + if (__size == 0) __size = __computeSerializedSize() + 1 __serializedSizeMemoized = __size - } __size - 1 - } - def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { + def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = range.foreach { __v => val __m = __v _output__.writeTag(1, 2) @@ -56,17 +51,14 @@ final case class Diagnostic( }; { val __v = severity.value - if (__v != 0) { + if (__v != 0) _output__.writeEnum(2, __v) - } }; { val __v = message - if (!__v.isEmpty) { + if (!__v.isEmpty) _output__.writeString(3, __v) - } }; - } def getRange: dotty.tools.dotc.semanticdb.Range = range.getOrElse(dotty.tools.dotc.semanticdb.Range.defaultInstance) def clearRange: Diagnostic = copy(range = _root_.scala.None) def withRange(__v: dotty.tools.dotc.semanticdb.Range): Diagnostic = copy(range = Option(__v)) @@ -77,18 
+69,17 @@ final case class Diagnostic( // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.Diagnostic]) -} -object Diagnostic extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.Diagnostic] { +object Diagnostic extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.Diagnostic]: implicit def messageCompanion: SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.Diagnostic] = this - def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.Diagnostic = { + def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.Diagnostic = var __range: _root_.scala.Option[dotty.tools.dotc.semanticdb.Range] = _root_.scala.None var __severity: dotty.tools.dotc.semanticdb.Diagnostic.Severity = dotty.tools.dotc.semanticdb.Diagnostic.Severity.UNKNOWN_SEVERITY var __message: _root_.scala.Predef.String = "" var _done__ = false - while (!_done__) { + while (!_done__) val _tag__ = _input__.readTag() - _tag__ match { + _tag__ match case 0 => _done__ = true case 10 => __range = Option(__range.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.Range](_input__))(LiteParser.readMessage(_input__, _))) @@ -97,14 +88,11 @@ object Diagnostic extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc. case 26 => __message = _input__.readStringRequireUtf8() case tag => _input__.skipField(tag) - } - } dotty.tools.dotc.semanticdb.Diagnostic( range = __range, severity = __severity, message = __message ) - } @@ -116,7 +104,7 @@ object Diagnostic extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc. 
severity = dotty.tools.dotc.semanticdb.Diagnostic.Severity.UNKNOWN_SEVERITY, message = "" ) - sealed abstract class Severity(val value: _root_.scala.Int) extends SemanticdbGeneratedEnum derives CanEqual { + sealed abstract class Severity(val value: _root_.scala.Int) extends SemanticdbGeneratedEnum derives CanEqual: type EnumType = Severity def isUnknownSeverity: _root_.scala.Boolean = false def isError: _root_.scala.Boolean = false @@ -125,61 +113,53 @@ object Diagnostic extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc. def isHint: _root_.scala.Boolean = false final def asRecognized: _root_.scala.Option[dotty.tools.dotc.semanticdb.Diagnostic.Severity.Recognized] = if (isUnrecognized) _root_.scala.None else _root_.scala.Some(this.asInstanceOf[dotty.tools.dotc.semanticdb.Diagnostic.Severity.Recognized]) - } - object Severity { + object Severity: sealed trait Recognized extends Severity @SerialVersionUID(0L) - case object UNKNOWN_SEVERITY extends Severity(0) with Severity.Recognized { + case object UNKNOWN_SEVERITY extends Severity(0) with Severity.Recognized: val index = 0 val name = "UNKNOWN_SEVERITY" override def isUnknownSeverity: _root_.scala.Boolean = true - } @SerialVersionUID(0L) - case object ERROR extends Severity(1) with Severity.Recognized { + case object ERROR extends Severity(1) with Severity.Recognized: val index = 1 val name = "ERROR" override def isError: _root_.scala.Boolean = true - } @SerialVersionUID(0L) - case object WARNING extends Severity(2) with Severity.Recognized { + case object WARNING extends Severity(2) with Severity.Recognized: val index = 2 val name = "WARNING" override def isWarning: _root_.scala.Boolean = true - } @SerialVersionUID(0L) - case object INFORMATION extends Severity(3) with Severity.Recognized { + case object INFORMATION extends Severity(3) with Severity.Recognized: val index = 3 val name = "INFORMATION" override def isInformation: _root_.scala.Boolean = true - } @SerialVersionUID(0L) - case object HINT 
extends Severity(4) with Severity.Recognized { + case object HINT extends Severity(4) with Severity.Recognized: val index = 4 val name = "HINT" override def isHint: _root_.scala.Boolean = true - } @SerialVersionUID(0L) final case class Unrecognized(unrecognizedValue: _root_.scala.Int) extends Severity(unrecognizedValue) with SemanticdbUnrecognizedEnum lazy val values = scala.collection.immutable.Seq(UNKNOWN_SEVERITY, ERROR, WARNING, INFORMATION, HINT) - def fromValue(__value: _root_.scala.Int): Severity = __value match { + def fromValue(__value: _root_.scala.Int): Severity = __value match case 0 => UNKNOWN_SEVERITY case 1 => ERROR case 2 => WARNING case 3 => INFORMATION case 4 => HINT case __other => Unrecognized(__other) - } - } final val RANGE_FIELD_NUMBER = 1 final val SEVERITY_FIELD_NUMBER = 2 final val MESSAGE_FIELD_NUMBER = 3 @@ -193,4 +173,3 @@ object Diagnostic extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc. message ) // @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.Diagnostic]) -} diff --git a/compiler/src/dotty/tools/dotc/semanticdb/generated/Documentation.scala b/compiler/src/dotty/tools/dotc/semanticdb/generated/Documentation.scala index 07fbda4991af..2737bbe518f2 100644 --- a/compiler/src/dotty/tools/dotc/semanticdb/generated/Documentation.scala +++ b/compiler/src/dotty/tools/dotc/semanticdb/generated/Documentation.scala @@ -12,50 +12,42 @@ import scala.annotation.internal.sharable final case class Documentation( message: _root_.scala.Predef.String = "", format: dotty.tools.dotc.semanticdb.Documentation.Format = dotty.tools.dotc.semanticdb.Documentation.Format.HTML - ) extends SemanticdbGeneratedMessage derives CanEqual { + ) extends SemanticdbGeneratedMessage derives CanEqual: @transient @sharable private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private[this] def __computeSerializedSize(): _root_.scala.Int = var __size = 
0 { val __value = message - if (!__value.isEmpty) { + if (!__value.isEmpty) __size += SemanticdbOutputStream.computeStringSize(1, __value) - } }; { val __value = format.value - if (__value != 0) { + if (__value != 0) __size += SemanticdbOutputStream.computeEnumSize(2, __value) - } }; __size - } - override def serializedSize: _root_.scala.Int = { + override def serializedSize: _root_.scala.Int = var __size = __serializedSizeMemoized - if (__size == 0) { + if (__size == 0) __size = __computeSerializedSize() + 1 __serializedSizeMemoized = __size - } __size - 1 - } - def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { + def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { val __v = message - if (!__v.isEmpty) { + if (!__v.isEmpty) _output__.writeString(1, __v) - } }; { val __v = format.value - if (__v != 0) { + if (__v != 0) _output__.writeEnum(2, __v) - } }; - } def withMessage(__v: _root_.scala.Predef.String): Documentation = copy(message = __v) def withFormat(__v: dotty.tools.dotc.semanticdb.Documentation.Format): Documentation = copy(format = __v) @@ -63,30 +55,26 @@ final case class Documentation( // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.Documentation]) -} -object Documentation extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.Documentation] { +object Documentation extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.Documentation]: implicit def messageCompanion: SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.Documentation] = this - def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.Documentation = { + def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.Documentation = var __message: _root_.scala.Predef.String = "" var __format: dotty.tools.dotc.semanticdb.Documentation.Format = dotty.tools.dotc.semanticdb.Documentation.Format.HTML var _done__ = false - while (!_done__) { + while 
(!_done__) val _tag__ = _input__.readTag() - _tag__ match { + _tag__ match case 0 => _done__ = true case 10 => __message = _input__.readStringRequireUtf8() case 16 => __format = dotty.tools.dotc.semanticdb.Documentation.Format.fromValue(_input__.readEnum()) case tag => _input__.skipField(tag) - } - } dotty.tools.dotc.semanticdb.Documentation( message = __message, format = __format ) - } @@ -97,7 +85,7 @@ object Documentation extends SemanticdbGeneratedMessageCompanion[dotty.tools.do message = "", format = dotty.tools.dotc.semanticdb.Documentation.Format.HTML ) - sealed abstract class Format(val value: _root_.scala.Int) extends SemanticdbGeneratedEnum derives CanEqual { + sealed abstract class Format(val value: _root_.scala.Int) extends SemanticdbGeneratedEnum derives CanEqual: type EnumType = Format def isHtml: _root_.scala.Boolean = false def isMarkdown: _root_.scala.Boolean = false @@ -106,61 +94,53 @@ object Documentation extends SemanticdbGeneratedMessageCompanion[dotty.tools.do def isKdoc: _root_.scala.Boolean = false final def asRecognized: _root_.scala.Option[dotty.tools.dotc.semanticdb.Documentation.Format.Recognized] = if (isUnrecognized) _root_.scala.None else _root_.scala.Some(this.asInstanceOf[dotty.tools.dotc.semanticdb.Documentation.Format.Recognized]) - } - object Format { + object Format: sealed trait Recognized extends Format @SerialVersionUID(0L) - case object HTML extends Format(0) with Format.Recognized { + case object HTML extends Format(0) with Format.Recognized: val index = 0 val name = "HTML" override def isHtml: _root_.scala.Boolean = true - } @SerialVersionUID(0L) - case object MARKDOWN extends Format(1) with Format.Recognized { + case object MARKDOWN extends Format(1) with Format.Recognized: val index = 1 val name = "MARKDOWN" override def isMarkdown: _root_.scala.Boolean = true - } @SerialVersionUID(0L) - case object JAVADOC extends Format(2) with Format.Recognized { + case object JAVADOC extends Format(2) with Format.Recognized: val 
index = 2 val name = "JAVADOC" override def isJavadoc: _root_.scala.Boolean = true - } @SerialVersionUID(0L) - case object SCALADOC extends Format(3) with Format.Recognized { + case object SCALADOC extends Format(3) with Format.Recognized: val index = 3 val name = "SCALADOC" override def isScaladoc: _root_.scala.Boolean = true - } @SerialVersionUID(0L) - case object KDOC extends Format(4) with Format.Recognized { + case object KDOC extends Format(4) with Format.Recognized: val index = 4 val name = "KDOC" override def isKdoc: _root_.scala.Boolean = true - } @SerialVersionUID(0L) final case class Unrecognized(unrecognizedValue: _root_.scala.Int) extends Format(unrecognizedValue) with SemanticdbUnrecognizedEnum lazy val values = scala.collection.immutable.Seq(HTML, MARKDOWN, JAVADOC, SCALADOC, KDOC) - def fromValue(__value: _root_.scala.Int): Format = __value match { + def fromValue(__value: _root_.scala.Int): Format = __value match case 0 => HTML case 1 => MARKDOWN case 2 => JAVADOC case 3 => SCALADOC case 4 => KDOC case __other => Unrecognized(__other) - } - } final val MESSAGE_FIELD_NUMBER = 1 final val FORMAT_FIELD_NUMBER = 2 def of( @@ -171,4 +151,3 @@ object Documentation extends SemanticdbGeneratedMessageCompanion[dotty.tools.do format ) // @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.Documentation]) -} diff --git a/compiler/src/dotty/tools/dotc/semanticdb/generated/Language.scala b/compiler/src/dotty/tools/dotc/semanticdb/generated/Language.scala index c57a3d3cddc3..c9f37d32a366 100644 --- a/compiler/src/dotty/tools/dotc/semanticdb/generated/Language.scala +++ b/compiler/src/dotty/tools/dotc/semanticdb/generated/Language.scala @@ -8,49 +8,43 @@ package dotty.tools.dotc.semanticdb import dotty.tools.dotc.semanticdb.internal._ import scala.annotation.internal.sharable -sealed abstract class Language(val value: _root_.scala.Int) extends SemanticdbGeneratedEnum derives CanEqual { +sealed abstract class Language(val value: 
_root_.scala.Int) extends SemanticdbGeneratedEnum derives CanEqual: type EnumType = Language def isUnknownLanguage: _root_.scala.Boolean = false def isScala: _root_.scala.Boolean = false def isJava: _root_.scala.Boolean = false final def asRecognized: _root_.scala.Option[dotty.tools.dotc.semanticdb.Language.Recognized] = if (isUnrecognized) _root_.scala.None else _root_.scala.Some(this.asInstanceOf[dotty.tools.dotc.semanticdb.Language.Recognized]) -} -object Language { +object Language: sealed trait Recognized extends Language @SerialVersionUID(0L) - case object UNKNOWN_LANGUAGE extends Language(0) with Language.Recognized { + case object UNKNOWN_LANGUAGE extends Language(0) with Language.Recognized: val index = 0 val name = "UNKNOWN_LANGUAGE" override def isUnknownLanguage: _root_.scala.Boolean = true - } @SerialVersionUID(0L) - case object SCALA extends Language(1) with Language.Recognized { + case object SCALA extends Language(1) with Language.Recognized: val index = 1 val name = "SCALA" override def isScala: _root_.scala.Boolean = true - } @SerialVersionUID(0L) - case object JAVA extends Language(2) with Language.Recognized { + case object JAVA extends Language(2) with Language.Recognized: val index = 2 val name = "JAVA" override def isJava: _root_.scala.Boolean = true - } @SerialVersionUID(0L) final case class Unrecognized(unrecognizedValue: _root_.scala.Int) extends Language(unrecognizedValue) with SemanticdbUnrecognizedEnum lazy val values = scala.collection.immutable.Seq(UNKNOWN_LANGUAGE, SCALA, JAVA) - def fromValue(__value: _root_.scala.Int): Language = __value match { + def fromValue(__value: _root_.scala.Int): Language = __value match case 0 => UNKNOWN_LANGUAGE case 1 => SCALA case 2 => JAVA case __other => Unrecognized(__other) - } -} \ No newline at end of file diff --git a/compiler/src/dotty/tools/dotc/semanticdb/generated/Location.scala b/compiler/src/dotty/tools/dotc/semanticdb/generated/Location.scala index a3667e944ae4..a57ccfae9d2d 100644 --- 
a/compiler/src/dotty/tools/dotc/semanticdb/generated/Location.scala +++ b/compiler/src/dotty/tools/dotc/semanticdb/generated/Location.scala @@ -12,39 +12,34 @@ import scala.annotation.internal.sharable final case class Location( uri: _root_.scala.Predef.String = "", range: _root_.scala.Option[dotty.tools.dotc.semanticdb.Range] = _root_.scala.None - ) extends SemanticdbGeneratedMessage derives CanEqual { + ) extends SemanticdbGeneratedMessage derives CanEqual: @transient @sharable private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private[this] def __computeSerializedSize(): _root_.scala.Int = var __size = 0 { val __value = uri - if (!__value.isEmpty) { + if (!__value.isEmpty) __size += SemanticdbOutputStream.computeStringSize(1, __value) - } }; if (range.isDefined) { val __value = range.get __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize }; __size - } - override def serializedSize: _root_.scala.Int = { + override def serializedSize: _root_.scala.Int = var __size = __serializedSizeMemoized - if (__size == 0) { + if (__size == 0) __size = __computeSerializedSize() + 1 __serializedSizeMemoized = __size - } __size - 1 - } - def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { + def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { val __v = uri - if (!__v.isEmpty) { + if (!__v.isEmpty) _output__.writeString(1, __v) - } }; range.foreach { __v => val __m = __v @@ -52,7 +47,6 @@ final case class Location( _output__.writeUInt32NoTag(__m.serializedSize) __m.writeTo(_output__) }; - } def withUri(__v: _root_.scala.Predef.String): Location = copy(uri = __v) def getRange: dotty.tools.dotc.semanticdb.Range = range.getOrElse(dotty.tools.dotc.semanticdb.Range.defaultInstance) def clearRange: Location = copy(range = _root_.scala.None) @@ -62,30 +56,26 @@ final case class Location( // 
@@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.Location]) -} -object Location extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.Location] { +object Location extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.Location]: implicit def messageCompanion: SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.Location] = this - def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.Location = { + def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.Location = var __uri: _root_.scala.Predef.String = "" var __range: _root_.scala.Option[dotty.tools.dotc.semanticdb.Range] = _root_.scala.None var _done__ = false - while (!_done__) { + while (!_done__) val _tag__ = _input__.readTag() - _tag__ match { + _tag__ match case 0 => _done__ = true case 10 => __uri = _input__.readStringRequireUtf8() case 18 => __range = Option(__range.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.Range](_input__))(LiteParser.readMessage(_input__, _))) case tag => _input__.skipField(tag) - } - } dotty.tools.dotc.semanticdb.Location( uri = __uri, range = __range ) - } @@ -106,4 +96,3 @@ object Location extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.se range ) // @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.Location]) -} diff --git a/compiler/src/dotty/tools/dotc/semanticdb/generated/Range.scala b/compiler/src/dotty/tools/dotc/semanticdb/generated/Range.scala index d273664bdf6a..9666870596e3 100644 --- a/compiler/src/dotty/tools/dotc/semanticdb/generated/Range.scala +++ b/compiler/src/dotty/tools/dotc/semanticdb/generated/Range.scala @@ -14,76 +14,64 @@ final case class Range( startCharacter: _root_.scala.Int = 0, endLine: _root_.scala.Int = 0, endCharacter: _root_.scala.Int = 0 - ) extends SemanticdbGeneratedMessage derives CanEqual { + ) extends SemanticdbGeneratedMessage derives CanEqual: @transient @sharable private[this] 
var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private[this] def __computeSerializedSize(): _root_.scala.Int = var __size = 0 { val __value = startLine - if (__value != 0) { + if (__value != 0) __size += SemanticdbOutputStream.computeInt32Size(1, __value) - } }; { val __value = startCharacter - if (__value != 0) { + if (__value != 0) __size += SemanticdbOutputStream.computeInt32Size(2, __value) - } }; { val __value = endLine - if (__value != 0) { + if (__value != 0) __size += SemanticdbOutputStream.computeInt32Size(3, __value) - } }; { val __value = endCharacter - if (__value != 0) { + if (__value != 0) __size += SemanticdbOutputStream.computeInt32Size(4, __value) - } }; __size - } - override def serializedSize: _root_.scala.Int = { + override def serializedSize: _root_.scala.Int = var __size = __serializedSizeMemoized - if (__size == 0) { + if (__size == 0) __size = __computeSerializedSize() + 1 __serializedSizeMemoized = __size - } __size - 1 - } - def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { + def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { val __v = startLine - if (__v != 0) { + if (__v != 0) _output__.writeInt32(1, __v) - } }; { val __v = startCharacter - if (__v != 0) { + if (__v != 0) _output__.writeInt32(2, __v) - } }; { val __v = endLine - if (__v != 0) { + if (__v != 0) _output__.writeInt32(3, __v) - } }; { val __v = endCharacter - if (__v != 0) { + if (__v != 0) _output__.writeInt32(4, __v) - } }; - } def withStartLine(__v: _root_.scala.Int): Range = copy(startLine = __v) def withStartCharacter(__v: _root_.scala.Int): Range = copy(startCharacter = __v) def withEndLine(__v: _root_.scala.Int): Range = copy(endLine = __v) @@ -93,19 +81,18 @@ final case class Range( // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.Range]) -} -object Range extends 
SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.Range] { +object Range extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.Range]: implicit def messageCompanion: SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.Range] = this - def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.Range = { + def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.Range = var __startLine: _root_.scala.Int = 0 var __startCharacter: _root_.scala.Int = 0 var __endLine: _root_.scala.Int = 0 var __endCharacter: _root_.scala.Int = 0 var _done__ = false - while (!_done__) { + while (!_done__) val _tag__ = _input__.readTag() - _tag__ match { + _tag__ match case 0 => _done__ = true case 8 => __startLine = _input__.readInt32() @@ -116,15 +103,12 @@ object Range extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.seman case 32 => __endCharacter = _input__.readInt32() case tag => _input__.skipField(tag) - } - } dotty.tools.dotc.semanticdb.Range( startLine = __startLine, startCharacter = __startCharacter, endLine = __endLine, endCharacter = __endCharacter ) - } @@ -153,4 +137,3 @@ object Range extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.seman endCharacter ) // @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.Range]) -} diff --git a/compiler/src/dotty/tools/dotc/semanticdb/generated/Schema.scala b/compiler/src/dotty/tools/dotc/semanticdb/generated/Schema.scala index 841e69166feb..9e49daea36fa 100644 --- a/compiler/src/dotty/tools/dotc/semanticdb/generated/Schema.scala +++ b/compiler/src/dotty/tools/dotc/semanticdb/generated/Schema.scala @@ -8,49 +8,43 @@ package dotty.tools.dotc.semanticdb import dotty.tools.dotc.semanticdb.internal._ import scala.annotation.internal.sharable -sealed abstract class Schema(val value: _root_.scala.Int) extends SemanticdbGeneratedEnum derives CanEqual { +sealed abstract class Schema(val value: _root_.scala.Int) 
extends SemanticdbGeneratedEnum derives CanEqual: type EnumType = Schema def isLegacy: _root_.scala.Boolean = false def isSemanticdb3: _root_.scala.Boolean = false def isSemanticdb4: _root_.scala.Boolean = false final def asRecognized: _root_.scala.Option[dotty.tools.dotc.semanticdb.Schema.Recognized] = if (isUnrecognized) _root_.scala.None else _root_.scala.Some(this.asInstanceOf[dotty.tools.dotc.semanticdb.Schema.Recognized]) -} -object Schema { +object Schema: sealed trait Recognized extends Schema @SerialVersionUID(0L) - case object LEGACY extends Schema(0) with Schema.Recognized { + case object LEGACY extends Schema(0) with Schema.Recognized: val index = 0 val name = "LEGACY" override def isLegacy: _root_.scala.Boolean = true - } @SerialVersionUID(0L) - case object SEMANTICDB3 extends Schema(3) with Schema.Recognized { + case object SEMANTICDB3 extends Schema(3) with Schema.Recognized: val index = 1 val name = "SEMANTICDB3" override def isSemanticdb3: _root_.scala.Boolean = true - } @SerialVersionUID(0L) - case object SEMANTICDB4 extends Schema(4) with Schema.Recognized { + case object SEMANTICDB4 extends Schema(4) with Schema.Recognized: val index = 2 val name = "SEMANTICDB4" override def isSemanticdb4: _root_.scala.Boolean = true - } @SerialVersionUID(0L) final case class Unrecognized(unrecognizedValue: _root_.scala.Int) extends Schema(unrecognizedValue) with SemanticdbUnrecognizedEnum lazy val values = scala.collection.immutable.Seq(LEGACY, SEMANTICDB3, SEMANTICDB4) - def fromValue(__value: _root_.scala.Int): Schema = __value match { + def fromValue(__value: _root_.scala.Int): Schema = __value match case 0 => LEGACY case 3 => SEMANTICDB3 case 4 => SEMANTICDB4 case __other => Unrecognized(__other) - } -} \ No newline at end of file diff --git a/compiler/src/dotty/tools/dotc/semanticdb/generated/Scope.scala b/compiler/src/dotty/tools/dotc/semanticdb/generated/Scope.scala index 655ebe75185e..08e7399a274a 100644 --- 
a/compiler/src/dotty/tools/dotc/semanticdb/generated/Scope.scala +++ b/compiler/src/dotty/tools/dotc/semanticdb/generated/Scope.scala @@ -12,10 +12,10 @@ import scala.annotation.internal.sharable final case class Scope( symlinks: _root_.scala.Seq[_root_.scala.Predef.String] = _root_.scala.Seq.empty, hardlinks: _root_.scala.Seq[dotty.tools.dotc.semanticdb.SymbolInformation] = _root_.scala.Seq.empty - ) extends SemanticdbGeneratedMessage derives CanEqual { + ) extends SemanticdbGeneratedMessage derives CanEqual: @transient @sharable private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private[this] def __computeSerializedSize(): _root_.scala.Int = var __size = 0 symlinks.foreach { __item => val __value = __item @@ -26,17 +26,14 @@ final case class Scope( __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize } __size - } - override def serializedSize: _root_.scala.Int = { + override def serializedSize: _root_.scala.Int = var __size = __serializedSizeMemoized - if (__size == 0) { + if (__size == 0) __size = __computeSerializedSize() + 1 __serializedSizeMemoized = __size - } __size - 1 - } - def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { + def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = symlinks.foreach { __v => val __m = __v _output__.writeString(1, __m) @@ -47,7 +44,6 @@ final case class Scope( _output__.writeUInt32NoTag(__m.serializedSize) __m.writeTo(_output__) }; - } def clearSymlinks = copy(symlinks = _root_.scala.Seq.empty) def addSymlinks(__vs: _root_.scala.Predef.String *): Scope = addAllSymlinks(__vs) def addAllSymlinks(__vs: Iterable[_root_.scala.Predef.String]): Scope = copy(symlinks = symlinks ++ __vs) @@ -61,30 +57,26 @@ final case class Scope( // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.Scope]) -} -object Scope extends 
SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.Scope] { +object Scope extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.Scope]: implicit def messageCompanion: SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.Scope] = this - def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.Scope = { + def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.Scope = val __symlinks: _root_.scala.collection.immutable.VectorBuilder[_root_.scala.Predef.String] = new _root_.scala.collection.immutable.VectorBuilder[_root_.scala.Predef.String] val __hardlinks: _root_.scala.collection.immutable.VectorBuilder[dotty.tools.dotc.semanticdb.SymbolInformation] = new _root_.scala.collection.immutable.VectorBuilder[dotty.tools.dotc.semanticdb.SymbolInformation] var _done__ = false - while (!_done__) { + while (!_done__) val _tag__ = _input__.readTag() - _tag__ match { + _tag__ match case 0 => _done__ = true case 10 => __symlinks += _input__.readStringRequireUtf8() case 18 => __hardlinks += LiteParser.readMessage[dotty.tools.dotc.semanticdb.SymbolInformation](_input__) case tag => _input__.skipField(tag) - } - } dotty.tools.dotc.semanticdb.Scope( symlinks = __symlinks.result(), hardlinks = __hardlinks.result() ) - } @@ -105,4 +97,3 @@ object Scope extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.seman hardlinks ) // @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.Scope]) -} diff --git a/compiler/src/dotty/tools/dotc/semanticdb/generated/Signature.scala b/compiler/src/dotty/tools/dotc/semanticdb/generated/Signature.scala index 228e2f02349b..67a9dc3eb6ae 100644 --- a/compiler/src/dotty/tools/dotc/semanticdb/generated/Signature.scala +++ b/compiler/src/dotty/tools/dotc/semanticdb/generated/Signature.scala @@ -8,28 +8,26 @@ package dotty.tools.dotc.semanticdb import dotty.tools.dotc.semanticdb.internal._ import scala.annotation.internal.sharable -sealed trait 
Signature extends SemanticdbGeneratedSealedOneof derives CanEqual { +sealed trait Signature extends SemanticdbGeneratedSealedOneof derives CanEqual: type MessageType = dotty.tools.dotc.semanticdb.SignatureMessage final def isEmpty = this.isInstanceOf[dotty.tools.dotc.semanticdb.Signature.Empty.type] final def isDefined = !isEmpty final def asMessage: dotty.tools.dotc.semanticdb.SignatureMessage = dotty.tools.dotc.semanticdb.Signature.SignatureTypeMapper.toBase(this) final def asNonEmpty: Option[dotty.tools.dotc.semanticdb.Signature.NonEmpty] = if (isEmpty) None else Some(this.asInstanceOf[dotty.tools.dotc.semanticdb.Signature.NonEmpty]) -} -object Signature { +object Signature: case object Empty extends dotty.tools.dotc.semanticdb.Signature sealed trait NonEmpty extends dotty.tools.dotc.semanticdb.Signature def defaultInstance: dotty.tools.dotc.semanticdb.Signature = Empty - implicit val SignatureTypeMapper: SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.SignatureMessage, dotty.tools.dotc.semanticdb.Signature] = new SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.SignatureMessage, dotty.tools.dotc.semanticdb.Signature] { - override def toCustom(__base: dotty.tools.dotc.semanticdb.SignatureMessage): dotty.tools.dotc.semanticdb.Signature = __base.sealedValue match { + implicit val SignatureTypeMapper: SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.SignatureMessage, dotty.tools.dotc.semanticdb.Signature] = new SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.SignatureMessage, dotty.tools.dotc.semanticdb.Signature]: + override def toCustom(__base: dotty.tools.dotc.semanticdb.SignatureMessage): dotty.tools.dotc.semanticdb.Signature = __base.sealedValue match case __v: dotty.tools.dotc.semanticdb.SignatureMessage.SealedValue.ClassSignature => __v.value case __v: dotty.tools.dotc.semanticdb.SignatureMessage.SealedValue.MethodSignature => __v.value case __v: dotty.tools.dotc.semanticdb.SignatureMessage.SealedValue.TypeSignature => __v.value case __v: 
dotty.tools.dotc.semanticdb.SignatureMessage.SealedValue.ValueSignature => __v.value case dotty.tools.dotc.semanticdb.SignatureMessage.SealedValue.Empty => Empty - } override def toBase(__custom: dotty.tools.dotc.semanticdb.Signature): dotty.tools.dotc.semanticdb.SignatureMessage = dotty.tools.dotc.semanticdb.SignatureMessage(__custom match { case __v: dotty.tools.dotc.semanticdb.ClassSignature => dotty.tools.dotc.semanticdb.SignatureMessage.SealedValue.ClassSignature(__v) case __v: dotty.tools.dotc.semanticdb.MethodSignature => dotty.tools.dotc.semanticdb.SignatureMessage.SealedValue.MethodSignature(__v) @@ -37,15 +35,13 @@ object Signature { case __v: dotty.tools.dotc.semanticdb.ValueSignature => dotty.tools.dotc.semanticdb.SignatureMessage.SealedValue.ValueSignature(__v) case Empty => dotty.tools.dotc.semanticdb.SignatureMessage.SealedValue.Empty }) - } -} @SerialVersionUID(0L) final case class SignatureMessage( sealedValue: dotty.tools.dotc.semanticdb.SignatureMessage.SealedValue = dotty.tools.dotc.semanticdb.SignatureMessage.SealedValue.Empty - ) extends SemanticdbGeneratedMessage derives CanEqual { + ) extends SemanticdbGeneratedMessage derives CanEqual: @transient @sharable private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private[this] def __computeSerializedSize(): _root_.scala.Int = var __size = 0 if (sealedValue.classSignature.isDefined) { val __value = sealedValue.classSignature.get @@ -64,17 +60,14 @@ final case class SignatureMessage( __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize }; __size - } - override def serializedSize: _root_.scala.Int = { + override def serializedSize: _root_.scala.Int = var __size = __serializedSizeMemoized - if (__size == 0) { + if (__size == 0) __size = __computeSerializedSize() + 1 __serializedSizeMemoized = __size - } __size - 1 - } - def writeTo(`_output__`: 
SemanticdbOutputStream): _root_.scala.Unit = { + def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = sealedValue.classSignature.foreach { __v => val __m = __v _output__.writeTag(1, 2) @@ -99,7 +92,6 @@ final case class SignatureMessage( _output__.writeUInt32NoTag(__m.serializedSize) __m.writeTo(_output__) }; - } def getClassSignature: dotty.tools.dotc.semanticdb.ClassSignature = sealedValue.classSignature.getOrElse(dotty.tools.dotc.semanticdb.ClassSignature.defaultInstance) def withClassSignature(__v: dotty.tools.dotc.semanticdb.ClassSignature): SignatureMessage = copy(sealedValue = dotty.tools.dotc.semanticdb.SignatureMessage.SealedValue.ClassSignature(__v)) def getMethodSignature: dotty.tools.dotc.semanticdb.MethodSignature = sealedValue.methodSignature.getOrElse(dotty.tools.dotc.semanticdb.MethodSignature.defaultInstance) @@ -116,16 +108,15 @@ final case class SignatureMessage( def toSignature: dotty.tools.dotc.semanticdb.Signature = dotty.tools.dotc.semanticdb.Signature.SignatureTypeMapper.toCustom(this) // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.Signature]) -} -object SignatureMessage extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.SignatureMessage] { +object SignatureMessage extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.SignatureMessage]: implicit def messageCompanion: SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.SignatureMessage] = this - def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.SignatureMessage = { + def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.SignatureMessage = var __sealedValue: dotty.tools.dotc.semanticdb.SignatureMessage.SealedValue = dotty.tools.dotc.semanticdb.SignatureMessage.SealedValue.Empty var _done__ = false - while (!_done__) { + while (!_done__) val _tag__ = _input__.readTag() - _tag__ match { + _tag__ match case 0 => _done__ = true case 10 => __sealedValue = 
dotty.tools.dotc.semanticdb.SignatureMessage.SealedValue.ClassSignature(__sealedValue.classSignature.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.ClassSignature](_input__))(LiteParser.readMessage(_input__, _))) @@ -136,12 +127,9 @@ object SignatureMessage extends SemanticdbGeneratedMessageCompanion[dotty.tools case 34 => __sealedValue = dotty.tools.dotc.semanticdb.SignatureMessage.SealedValue.ValueSignature(__sealedValue.valueSignature.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.ValueSignature](_input__))(LiteParser.readMessage(_input__, _))) case tag => _input__.skipField(tag) - } - } dotty.tools.dotc.semanticdb.SignatureMessage( sealedValue = __sealedValue ) - } @@ -151,7 +139,7 @@ object SignatureMessage extends SemanticdbGeneratedMessageCompanion[dotty.tools lazy val defaultInstance = dotty.tools.dotc.semanticdb.SignatureMessage( sealedValue = dotty.tools.dotc.semanticdb.SignatureMessage.SealedValue.Empty ) - sealed trait SealedValue extends SemanticdbGeneratedOneof derives CanEqual { + sealed trait SealedValue extends SemanticdbGeneratedOneof derives CanEqual: def isEmpty: _root_.scala.Boolean = false def isDefined: _root_.scala.Boolean = true def isClassSignature: _root_.scala.Boolean = false @@ -162,46 +150,39 @@ object SignatureMessage extends SemanticdbGeneratedMessageCompanion[dotty.tools def methodSignature: _root_.scala.Option[dotty.tools.dotc.semanticdb.MethodSignature] = _root_.scala.None def typeSignature: _root_.scala.Option[dotty.tools.dotc.semanticdb.TypeSignature] = _root_.scala.None def valueSignature: _root_.scala.Option[dotty.tools.dotc.semanticdb.ValueSignature] = _root_.scala.None - } - object SealedValue { + object SealedValue: @SerialVersionUID(0L) - case object Empty extends dotty.tools.dotc.semanticdb.SignatureMessage.SealedValue { + case object Empty extends dotty.tools.dotc.semanticdb.SignatureMessage.SealedValue: type ValueType = _root_.scala.Nothing override def isEmpty: _root_.scala.Boolean = true override def 
isDefined: _root_.scala.Boolean = false override def number: _root_.scala.Int = 0 override def value: _root_.scala.Nothing = throw new java.util.NoSuchElementException("Empty.value") - } @SerialVersionUID(0L) - final case class ClassSignature(value: dotty.tools.dotc.semanticdb.ClassSignature) extends dotty.tools.dotc.semanticdb.SignatureMessage.SealedValue derives CanEqual { + final case class ClassSignature(value: dotty.tools.dotc.semanticdb.ClassSignature) extends dotty.tools.dotc.semanticdb.SignatureMessage.SealedValue derives CanEqual: type ValueType = dotty.tools.dotc.semanticdb.ClassSignature override def isClassSignature: _root_.scala.Boolean = true override def classSignature: _root_.scala.Option[dotty.tools.dotc.semanticdb.ClassSignature] = Some(value) override def number: _root_.scala.Int = 1 - } @SerialVersionUID(0L) - final case class MethodSignature(value: dotty.tools.dotc.semanticdb.MethodSignature) extends dotty.tools.dotc.semanticdb.SignatureMessage.SealedValue derives CanEqual { + final case class MethodSignature(value: dotty.tools.dotc.semanticdb.MethodSignature) extends dotty.tools.dotc.semanticdb.SignatureMessage.SealedValue derives CanEqual: type ValueType = dotty.tools.dotc.semanticdb.MethodSignature override def isMethodSignature: _root_.scala.Boolean = true override def methodSignature: _root_.scala.Option[dotty.tools.dotc.semanticdb.MethodSignature] = Some(value) override def number: _root_.scala.Int = 2 - } @SerialVersionUID(0L) - final case class TypeSignature(value: dotty.tools.dotc.semanticdb.TypeSignature) extends dotty.tools.dotc.semanticdb.SignatureMessage.SealedValue derives CanEqual { + final case class TypeSignature(value: dotty.tools.dotc.semanticdb.TypeSignature) extends dotty.tools.dotc.semanticdb.SignatureMessage.SealedValue derives CanEqual: type ValueType = dotty.tools.dotc.semanticdb.TypeSignature override def isTypeSignature: _root_.scala.Boolean = true override def typeSignature: 
_root_.scala.Option[dotty.tools.dotc.semanticdb.TypeSignature] = Some(value) override def number: _root_.scala.Int = 3 - } @SerialVersionUID(0L) - final case class ValueSignature(value: dotty.tools.dotc.semanticdb.ValueSignature) extends dotty.tools.dotc.semanticdb.SignatureMessage.SealedValue derives CanEqual { + final case class ValueSignature(value: dotty.tools.dotc.semanticdb.ValueSignature) extends dotty.tools.dotc.semanticdb.SignatureMessage.SealedValue derives CanEqual: type ValueType = dotty.tools.dotc.semanticdb.ValueSignature override def isValueSignature: _root_.scala.Boolean = true override def valueSignature: _root_.scala.Option[dotty.tools.dotc.semanticdb.ValueSignature] = Some(value) override def number: _root_.scala.Int = 4 - } - } final val CLASS_SIGNATURE_FIELD_NUMBER = 1 final val METHOD_SIGNATURE_FIELD_NUMBER = 2 final val TYPE_SIGNATURE_FIELD_NUMBER = 3 @@ -212,7 +193,6 @@ object SignatureMessage extends SemanticdbGeneratedMessageCompanion[dotty.tools sealedValue ) // @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.Signature]) -} @SerialVersionUID(0L) final case class ClassSignature( @@ -220,10 +200,10 @@ final case class ClassSignature( parents: _root_.scala.Seq[dotty.tools.dotc.semanticdb.Type] = _root_.scala.Seq.empty, self: dotty.tools.dotc.semanticdb.Type = dotty.tools.dotc.semanticdb.ClassSignature._typemapper_self.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance), declarations: _root_.scala.Option[dotty.tools.dotc.semanticdb.Scope] = _root_.scala.None - ) extends dotty.tools.dotc.semanticdb.Signature.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { + ) extends dotty.tools.dotc.semanticdb.Signature.NonEmpty with SemanticdbGeneratedMessage derives CanEqual: @transient @sharable private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private[this] def __computeSerializedSize(): _root_.scala.Int = var 
__size = 0 if (typeParameters.isDefined) { val __value = typeParameters.get @@ -236,26 +216,22 @@ final case class ClassSignature( { val __value = dotty.tools.dotc.semanticdb.ClassSignature._typemapper_self.toBase(self) - if (__value.serializedSize != 0) { + if (__value.serializedSize != 0) __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize - } }; if (declarations.isDefined) { val __value = declarations.get __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize }; __size - } - override def serializedSize: _root_.scala.Int = { + override def serializedSize: _root_.scala.Int = var __size = __serializedSizeMemoized - if (__size == 0) { + if (__size == 0) __size = __computeSerializedSize() + 1 __serializedSizeMemoized = __size - } __size - 1 - } - def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { + def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = typeParameters.foreach { __v => val __m = __v _output__.writeTag(1, 2) @@ -270,11 +246,10 @@ final case class ClassSignature( }; { val __v = dotty.tools.dotc.semanticdb.ClassSignature._typemapper_self.toBase(self) - if (__v.serializedSize != 0) { + if (__v.serializedSize != 0) _output__.writeTag(3, 2) _output__.writeUInt32NoTag(__v.serializedSize) __v.writeTo(_output__) - } }; declarations.foreach { __v => val __m = __v @@ -282,7 +257,6 @@ final case class ClassSignature( _output__.writeUInt32NoTag(__m.serializedSize) __m.writeTo(_output__) }; - } def getTypeParameters: dotty.tools.dotc.semanticdb.Scope = typeParameters.getOrElse(dotty.tools.dotc.semanticdb.Scope.defaultInstance) def clearTypeParameters: ClassSignature = copy(typeParameters = _root_.scala.None) def withTypeParameters(__v: dotty.tools.dotc.semanticdb.Scope): ClassSignature = copy(typeParameters = Option(__v)) @@ -299,19 +273,18 @@ final case class ClassSignature( // 
@@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.ClassSignature]) -} -object ClassSignature extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.ClassSignature] { +object ClassSignature extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.ClassSignature]: implicit def messageCompanion: SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.ClassSignature] = this - def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.ClassSignature = { + def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.ClassSignature = var __typeParameters: _root_.scala.Option[dotty.tools.dotc.semanticdb.Scope] = _root_.scala.None val __parents: _root_.scala.collection.immutable.VectorBuilder[dotty.tools.dotc.semanticdb.Type] = new _root_.scala.collection.immutable.VectorBuilder[dotty.tools.dotc.semanticdb.Type] var __self: _root_.scala.Option[dotty.tools.dotc.semanticdb.TypeMessage] = _root_.scala.None var __declarations: _root_.scala.Option[dotty.tools.dotc.semanticdb.Scope] = _root_.scala.None var _done__ = false - while (!_done__) { + while (!_done__) val _tag__ = _input__.readTag() - _tag__ match { + _tag__ match case 0 => _done__ = true case 10 => __typeParameters = Option(__typeParameters.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.Scope](_input__))(LiteParser.readMessage(_input__, _))) @@ -322,15 +295,12 @@ object ClassSignature extends SemanticdbGeneratedMessageCompanion[dotty.tools.d case 34 => __declarations = Option(__declarations.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.Scope](_input__))(LiteParser.readMessage(_input__, _))) case tag => _input__.skipField(tag) - } - } dotty.tools.dotc.semanticdb.ClassSignature( typeParameters = __typeParameters, parents = __parents.result(), self = dotty.tools.dotc.semanticdb.ClassSignature._typemapper_self.toCustom(__self.getOrElse(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance)), declarations = 
__declarations ) - } @@ -363,17 +333,16 @@ object ClassSignature extends SemanticdbGeneratedMessageCompanion[dotty.tools.d declarations ) // @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.ClassSignature]) -} @SerialVersionUID(0L) final case class MethodSignature( typeParameters: _root_.scala.Option[dotty.tools.dotc.semanticdb.Scope] = _root_.scala.None, parameterLists: _root_.scala.Seq[dotty.tools.dotc.semanticdb.Scope] = _root_.scala.Seq.empty, returnType: dotty.tools.dotc.semanticdb.Type = dotty.tools.dotc.semanticdb.MethodSignature._typemapper_returnType.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance) - ) extends dotty.tools.dotc.semanticdb.Signature.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { + ) extends dotty.tools.dotc.semanticdb.Signature.NonEmpty with SemanticdbGeneratedMessage derives CanEqual: @transient @sharable private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private[this] def __computeSerializedSize(): _root_.scala.Int = var __size = 0 if (typeParameters.isDefined) { val __value = typeParameters.get @@ -386,22 +355,18 @@ final case class MethodSignature( { val __value = dotty.tools.dotc.semanticdb.MethodSignature._typemapper_returnType.toBase(returnType) - if (__value.serializedSize != 0) { + if (__value.serializedSize != 0) __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize - } }; __size - } - override def serializedSize: _root_.scala.Int = { + override def serializedSize: _root_.scala.Int = var __size = __serializedSizeMemoized - if (__size == 0) { + if (__size == 0) __size = __computeSerializedSize() + 1 __serializedSizeMemoized = __size - } __size - 1 - } - def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { + def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = typeParameters.foreach { __v => val __m = 
__v _output__.writeTag(1, 2) @@ -416,13 +381,11 @@ final case class MethodSignature( }; { val __v = dotty.tools.dotc.semanticdb.MethodSignature._typemapper_returnType.toBase(returnType) - if (__v.serializedSize != 0) { + if (__v.serializedSize != 0) _output__.writeTag(3, 2) _output__.writeUInt32NoTag(__v.serializedSize) __v.writeTo(_output__) - } }; - } def getTypeParameters: dotty.tools.dotc.semanticdb.Scope = typeParameters.getOrElse(dotty.tools.dotc.semanticdb.Scope.defaultInstance) def clearTypeParameters: MethodSignature = copy(typeParameters = _root_.scala.None) def withTypeParameters(__v: dotty.tools.dotc.semanticdb.Scope): MethodSignature = copy(typeParameters = Option(__v)) @@ -436,18 +399,17 @@ final case class MethodSignature( // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.MethodSignature]) -} -object MethodSignature extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.MethodSignature] { +object MethodSignature extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.MethodSignature]: implicit def messageCompanion: SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.MethodSignature] = this - def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.MethodSignature = { + def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.MethodSignature = var __typeParameters: _root_.scala.Option[dotty.tools.dotc.semanticdb.Scope] = _root_.scala.None val __parameterLists: _root_.scala.collection.immutable.VectorBuilder[dotty.tools.dotc.semanticdb.Scope] = new _root_.scala.collection.immutable.VectorBuilder[dotty.tools.dotc.semanticdb.Scope] var __returnType: _root_.scala.Option[dotty.tools.dotc.semanticdb.TypeMessage] = _root_.scala.None var _done__ = false - while (!_done__) { + while (!_done__) val _tag__ = _input__.readTag() - _tag__ match { + _tag__ match case 0 => _done__ = true case 10 => __typeParameters = 
Option(__typeParameters.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.Scope](_input__))(LiteParser.readMessage(_input__, _))) @@ -456,14 +418,11 @@ object MethodSignature extends SemanticdbGeneratedMessageCompanion[dotty.tools. case 26 => __returnType = _root_.scala.Some(__returnType.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.TypeMessage](_input__))(LiteParser.readMessage(_input__, _))) case tag => _input__.skipField(tag) - } - } dotty.tools.dotc.semanticdb.MethodSignature( typeParameters = __typeParameters, parameterLists = __parameterLists.result(), returnType = dotty.tools.dotc.semanticdb.MethodSignature._typemapper_returnType.toCustom(__returnType.getOrElse(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance)) ) - } @@ -490,17 +449,16 @@ object MethodSignature extends SemanticdbGeneratedMessageCompanion[dotty.tools. returnType ) // @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.MethodSignature]) -} @SerialVersionUID(0L) final case class TypeSignature( typeParameters: _root_.scala.Option[dotty.tools.dotc.semanticdb.Scope] = _root_.scala.None, lowerBound: dotty.tools.dotc.semanticdb.Type = dotty.tools.dotc.semanticdb.TypeSignature._typemapper_lowerBound.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance), upperBound: dotty.tools.dotc.semanticdb.Type = dotty.tools.dotc.semanticdb.TypeSignature._typemapper_upperBound.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance) - ) extends dotty.tools.dotc.semanticdb.Signature.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { + ) extends dotty.tools.dotc.semanticdb.Signature.NonEmpty with SemanticdbGeneratedMessage derives CanEqual: @transient @sharable private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private[this] def __computeSerializedSize(): _root_.scala.Int = var __size = 0 if (typeParameters.isDefined) { val __value = typeParameters.get @@ 
-509,29 +467,24 @@ final case class TypeSignature( { val __value = dotty.tools.dotc.semanticdb.TypeSignature._typemapper_lowerBound.toBase(lowerBound) - if (__value.serializedSize != 0) { + if (__value.serializedSize != 0) __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize - } }; { val __value = dotty.tools.dotc.semanticdb.TypeSignature._typemapper_upperBound.toBase(upperBound) - if (__value.serializedSize != 0) { + if (__value.serializedSize != 0) __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize - } }; __size - } - override def serializedSize: _root_.scala.Int = { + override def serializedSize: _root_.scala.Int = var __size = __serializedSizeMemoized - if (__size == 0) { + if (__size == 0) __size = __computeSerializedSize() + 1 __serializedSizeMemoized = __size - } __size - 1 - } - def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { + def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = typeParameters.foreach { __v => val __m = __v _output__.writeTag(1, 2) @@ -540,21 +493,18 @@ final case class TypeSignature( }; { val __v = dotty.tools.dotc.semanticdb.TypeSignature._typemapper_lowerBound.toBase(lowerBound) - if (__v.serializedSize != 0) { + if (__v.serializedSize != 0) _output__.writeTag(2, 2) _output__.writeUInt32NoTag(__v.serializedSize) __v.writeTo(_output__) - } }; { val __v = dotty.tools.dotc.semanticdb.TypeSignature._typemapper_upperBound.toBase(upperBound) - if (__v.serializedSize != 0) { + if (__v.serializedSize != 0) _output__.writeTag(3, 2) _output__.writeUInt32NoTag(__v.serializedSize) __v.writeTo(_output__) - } }; - } def getTypeParameters: dotty.tools.dotc.semanticdb.Scope = typeParameters.getOrElse(dotty.tools.dotc.semanticdb.Scope.defaultInstance) def clearTypeParameters: TypeSignature = copy(typeParameters = _root_.scala.None) def withTypeParameters(__v: dotty.tools.dotc.semanticdb.Scope): 
TypeSignature = copy(typeParameters = Option(__v)) @@ -565,18 +515,17 @@ final case class TypeSignature( // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.TypeSignature]) -} -object TypeSignature extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.TypeSignature] { +object TypeSignature extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.TypeSignature]: implicit def messageCompanion: SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.TypeSignature] = this - def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.TypeSignature = { + def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.TypeSignature = var __typeParameters: _root_.scala.Option[dotty.tools.dotc.semanticdb.Scope] = _root_.scala.None var __lowerBound: _root_.scala.Option[dotty.tools.dotc.semanticdb.TypeMessage] = _root_.scala.None var __upperBound: _root_.scala.Option[dotty.tools.dotc.semanticdb.TypeMessage] = _root_.scala.None var _done__ = false - while (!_done__) { + while (!_done__) val _tag__ = _input__.readTag() - _tag__ match { + _tag__ match case 0 => _done__ = true case 10 => __typeParameters = Option(__typeParameters.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.Scope](_input__))(LiteParser.readMessage(_input__, _))) @@ -585,14 +534,11 @@ object TypeSignature extends SemanticdbGeneratedMessageCompanion[dotty.tools.do case 26 => __upperBound = _root_.scala.Some(__upperBound.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.TypeMessage](_input__))(LiteParser.readMessage(_input__, _))) case tag => _input__.skipField(tag) - } - } dotty.tools.dotc.semanticdb.TypeSignature( typeParameters = __typeParameters, lowerBound = dotty.tools.dotc.semanticdb.TypeSignature._typemapper_lowerBound.toCustom(__lowerBound.getOrElse(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance)), upperBound = 
dotty.tools.dotc.semanticdb.TypeSignature._typemapper_upperBound.toCustom(__upperBound.getOrElse(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance)) ) - } @@ -621,70 +567,59 @@ object TypeSignature extends SemanticdbGeneratedMessageCompanion[dotty.tools.do upperBound ) // @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.TypeSignature]) -} @SerialVersionUID(0L) final case class ValueSignature( tpe: dotty.tools.dotc.semanticdb.Type = dotty.tools.dotc.semanticdb.ValueSignature._typemapper_tpe.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance) - ) extends dotty.tools.dotc.semanticdb.Signature.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { + ) extends dotty.tools.dotc.semanticdb.Signature.NonEmpty with SemanticdbGeneratedMessage derives CanEqual: @transient @sharable private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private[this] def __computeSerializedSize(): _root_.scala.Int = var __size = 0 { val __value = dotty.tools.dotc.semanticdb.ValueSignature._typemapper_tpe.toBase(tpe) - if (__value.serializedSize != 0) { + if (__value.serializedSize != 0) __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize - } }; __size - } - override def serializedSize: _root_.scala.Int = { + override def serializedSize: _root_.scala.Int = var __size = __serializedSizeMemoized - if (__size == 0) { + if (__size == 0) __size = __computeSerializedSize() + 1 __serializedSizeMemoized = __size - } __size - 1 - } - def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { + def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { val __v = dotty.tools.dotc.semanticdb.ValueSignature._typemapper_tpe.toBase(tpe) - if (__v.serializedSize != 0) { + if (__v.serializedSize != 0) _output__.writeTag(1, 2) _output__.writeUInt32NoTag(__v.serializedSize) __v.writeTo(_output__) - 
} }; - } def withTpe(__v: dotty.tools.dotc.semanticdb.Type): ValueSignature = copy(tpe = __v) // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.ValueSignature]) -} -object ValueSignature extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.ValueSignature] { +object ValueSignature extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.ValueSignature]: implicit def messageCompanion: SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.ValueSignature] = this - def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.ValueSignature = { + def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.ValueSignature = var __tpe: _root_.scala.Option[dotty.tools.dotc.semanticdb.TypeMessage] = _root_.scala.None var _done__ = false - while (!_done__) { + while (!_done__) val _tag__ = _input__.readTag() - _tag__ match { + _tag__ match case 0 => _done__ = true case 10 => __tpe = _root_.scala.Some(__tpe.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.TypeMessage](_input__))(LiteParser.readMessage(_input__, _))) case tag => _input__.skipField(tag) - } - } dotty.tools.dotc.semanticdb.ValueSignature( tpe = dotty.tools.dotc.semanticdb.ValueSignature._typemapper_tpe.toCustom(__tpe.getOrElse(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance)) ) - } @@ -703,4 +638,3 @@ object ValueSignature extends SemanticdbGeneratedMessageCompanion[dotty.tools.d tpe ) // @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.ValueSignature]) -} diff --git a/compiler/src/dotty/tools/dotc/semanticdb/generated/SymbolInformation.scala b/compiler/src/dotty/tools/dotc/semanticdb/generated/SymbolInformation.scala index 93fbb207c4f6..b9066748b07b 100644 --- a/compiler/src/dotty/tools/dotc/semanticdb/generated/SymbolInformation.scala +++ b/compiler/src/dotty/tools/dotc/semanticdb/generated/SymbolInformation.scala @@ -20,52 +20,46 @@ final case class 
SymbolInformation( access: dotty.tools.dotc.semanticdb.Access = dotty.tools.dotc.semanticdb.SymbolInformation._typemapper_access.toCustom(dotty.tools.dotc.semanticdb.AccessMessage.defaultInstance), overriddenSymbols: _root_.scala.Seq[_root_.scala.Predef.String] = _root_.scala.Seq.empty, documentation: _root_.scala.Option[dotty.tools.dotc.semanticdb.Documentation] = _root_.scala.None - ) extends SemanticdbGeneratedMessage derives CanEqual { + ) extends SemanticdbGeneratedMessage derives CanEqual: @transient @sharable private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private[this] def __computeSerializedSize(): _root_.scala.Int = var __size = 0 { val __value = symbol - if (!__value.isEmpty) { + if (!__value.isEmpty) __size += SemanticdbOutputStream.computeStringSize(1, __value) - } }; { val __value = language.value - if (__value != 0) { + if (__value != 0) __size += SemanticdbOutputStream.computeEnumSize(16, __value) - } }; { val __value = kind.value - if (__value != 0) { + if (__value != 0) __size += SemanticdbOutputStream.computeEnumSize(3, __value) - } }; { val __value = properties - if (__value != 0) { + if (__value != 0) __size += SemanticdbOutputStream.computeInt32Size(4, __value) - } }; { val __value = displayName - if (!__value.isEmpty) { + if (!__value.isEmpty) __size += SemanticdbOutputStream.computeStringSize(5, __value) - } }; { val __value = dotty.tools.dotc.semanticdb.SymbolInformation._typemapper_signature.toBase(signature) - if (__value.serializedSize != 0) { + if (__value.serializedSize != 0) __size += 2 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize - } }; annotations.foreach { __item => val __value = __item @@ -74,9 +68,8 @@ final case class SymbolInformation( { val __value = dotty.tools.dotc.semanticdb.SymbolInformation._typemapper_access.toBase(access) - if (__value.serializedSize != 0) { + if 
(__value.serializedSize != 0) __size += 2 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize - } }; overriddenSymbols.foreach { __item => val __value = __item @@ -87,40 +80,33 @@ final case class SymbolInformation( __size += 2 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize }; __size - } - override def serializedSize: _root_.scala.Int = { + override def serializedSize: _root_.scala.Int = var __size = __serializedSizeMemoized - if (__size == 0) { + if (__size == 0) __size = __computeSerializedSize() + 1 __serializedSizeMemoized = __size - } __size - 1 - } - def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { + def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { val __v = symbol - if (!__v.isEmpty) { + if (!__v.isEmpty) _output__.writeString(1, __v) - } }; { val __v = kind.value - if (__v != 0) { + if (__v != 0) _output__.writeEnum(3, __v) - } }; { val __v = properties - if (__v != 0) { + if (__v != 0) _output__.writeInt32(4, __v) - } }; { val __v = displayName - if (!__v.isEmpty) { + if (!__v.isEmpty) _output__.writeString(5, __v) - } }; annotations.foreach { __v => val __m = __v @@ -130,25 +116,22 @@ final case class SymbolInformation( }; { val __v = language.value - if (__v != 0) { + if (__v != 0) _output__.writeEnum(16, __v) - } }; { val __v = dotty.tools.dotc.semanticdb.SymbolInformation._typemapper_signature.toBase(signature) - if (__v.serializedSize != 0) { + if (__v.serializedSize != 0) _output__.writeTag(17, 2) _output__.writeUInt32NoTag(__v.serializedSize) __v.writeTo(_output__) - } }; { val __v = dotty.tools.dotc.semanticdb.SymbolInformation._typemapper_access.toBase(access) - if (__v.serializedSize != 0) { + if (__v.serializedSize != 0) _output__.writeTag(18, 2) _output__.writeUInt32NoTag(__v.serializedSize) __v.writeTo(_output__) - } }; overriddenSymbols.foreach { __v => val __m = __v @@ -160,7 +143,6 @@ final case class 
SymbolInformation( _output__.writeUInt32NoTag(__m.serializedSize) __m.writeTo(_output__) }; - } def withSymbol(__v: _root_.scala.Predef.String): SymbolInformation = copy(symbol = __v) def withLanguage(__v: dotty.tools.dotc.semanticdb.Language): SymbolInformation = copy(language = __v) def withKind(__v: dotty.tools.dotc.semanticdb.SymbolInformation.Kind): SymbolInformation = copy(kind = __v) @@ -184,11 +166,10 @@ final case class SymbolInformation( // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.SymbolInformation]) -} -object SymbolInformation extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.SymbolInformation] { +object SymbolInformation extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.SymbolInformation]: implicit def messageCompanion: SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.SymbolInformation] = this - def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.SymbolInformation = { + def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.SymbolInformation = var __symbol: _root_.scala.Predef.String = "" var __language: dotty.tools.dotc.semanticdb.Language = dotty.tools.dotc.semanticdb.Language.UNKNOWN_LANGUAGE var __kind: dotty.tools.dotc.semanticdb.SymbolInformation.Kind = dotty.tools.dotc.semanticdb.SymbolInformation.Kind.UNKNOWN_KIND @@ -200,9 +181,9 @@ object SymbolInformation extends SemanticdbGeneratedMessageCompanion[dotty.tool val __overriddenSymbols: _root_.scala.collection.immutable.VectorBuilder[_root_.scala.Predef.String] = new _root_.scala.collection.immutable.VectorBuilder[_root_.scala.Predef.String] var __documentation: _root_.scala.Option[dotty.tools.dotc.semanticdb.Documentation] = _root_.scala.None var _done__ = false - while (!_done__) { + while (!_done__) val _tag__ = _input__.readTag() - _tag__ match { + _tag__ match case 0 => _done__ = true case 10 => __symbol = _input__.readStringRequireUtf8() @@ -225,8 +206,6 
@@ object SymbolInformation extends SemanticdbGeneratedMessageCompanion[dotty.tool case 162 => __documentation = Option(__documentation.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.Documentation](_input__))(LiteParser.readMessage(_input__, _))) case tag => _input__.skipField(tag) - } - } dotty.tools.dotc.semanticdb.SymbolInformation( symbol = __symbol, language = __language, @@ -239,7 +218,6 @@ object SymbolInformation extends SemanticdbGeneratedMessageCompanion[dotty.tool overriddenSymbols = __overriddenSymbols.result(), documentation = __documentation ) - } @@ -258,7 +236,7 @@ object SymbolInformation extends SemanticdbGeneratedMessageCompanion[dotty.tool overriddenSymbols = _root_.scala.Seq.empty, documentation = _root_.scala.None ) - sealed abstract class Kind(val value: _root_.scala.Int) extends SemanticdbGeneratedEnum derives CanEqual { + sealed abstract class Kind(val value: _root_.scala.Int) extends SemanticdbGeneratedEnum derives CanEqual: type EnumType = Kind def isUnknownKind: _root_.scala.Boolean = false def isLocal: _root_.scala.Boolean = false @@ -278,128 +256,111 @@ object SymbolInformation extends SemanticdbGeneratedMessageCompanion[dotty.tool def isInterface: _root_.scala.Boolean = false final def asRecognized: _root_.scala.Option[dotty.tools.dotc.semanticdb.SymbolInformation.Kind.Recognized] = if (isUnrecognized) _root_.scala.None else _root_.scala.Some(this.asInstanceOf[dotty.tools.dotc.semanticdb.SymbolInformation.Kind.Recognized]) - } - object Kind { + object Kind: sealed trait Recognized extends Kind @SerialVersionUID(0L) - case object UNKNOWN_KIND extends Kind(0) with Kind.Recognized { + case object UNKNOWN_KIND extends Kind(0) with Kind.Recognized: val index = 0 val name = "UNKNOWN_KIND" override def isUnknownKind: _root_.scala.Boolean = true - } @SerialVersionUID(0L) - case object LOCAL extends Kind(19) with Kind.Recognized { + case object LOCAL extends Kind(19) with Kind.Recognized: val index = 1 val name = "LOCAL" override def 
isLocal: _root_.scala.Boolean = true - } @SerialVersionUID(0L) - case object FIELD extends Kind(20) with Kind.Recognized { + case object FIELD extends Kind(20) with Kind.Recognized: val index = 2 val name = "FIELD" override def isField: _root_.scala.Boolean = true - } @SerialVersionUID(0L) - case object METHOD extends Kind(3) with Kind.Recognized { + case object METHOD extends Kind(3) with Kind.Recognized: val index = 3 val name = "METHOD" override def isMethod: _root_.scala.Boolean = true - } @SerialVersionUID(0L) - case object CONSTRUCTOR extends Kind(21) with Kind.Recognized { + case object CONSTRUCTOR extends Kind(21) with Kind.Recognized: val index = 4 val name = "CONSTRUCTOR" override def isConstructor: _root_.scala.Boolean = true - } @SerialVersionUID(0L) - case object MACRO extends Kind(6) with Kind.Recognized { + case object MACRO extends Kind(6) with Kind.Recognized: val index = 5 val name = "MACRO" override def isMacro: _root_.scala.Boolean = true - } @SerialVersionUID(0L) - case object TYPE extends Kind(7) with Kind.Recognized { + case object TYPE extends Kind(7) with Kind.Recognized: val index = 6 val name = "TYPE" override def isType: _root_.scala.Boolean = true - } @SerialVersionUID(0L) - case object PARAMETER extends Kind(8) with Kind.Recognized { + case object PARAMETER extends Kind(8) with Kind.Recognized: val index = 7 val name = "PARAMETER" override def isParameter: _root_.scala.Boolean = true - } @SerialVersionUID(0L) - case object SELF_PARAMETER extends Kind(17) with Kind.Recognized { + case object SELF_PARAMETER extends Kind(17) with Kind.Recognized: val index = 8 val name = "SELF_PARAMETER" override def isSelfParameter: _root_.scala.Boolean = true - } @SerialVersionUID(0L) - case object TYPE_PARAMETER extends Kind(9) with Kind.Recognized { + case object TYPE_PARAMETER extends Kind(9) with Kind.Recognized: val index = 9 val name = "TYPE_PARAMETER" override def isTypeParameter: _root_.scala.Boolean = true - } @SerialVersionUID(0L) - case 
object OBJECT extends Kind(10) with Kind.Recognized { + case object OBJECT extends Kind(10) with Kind.Recognized: val index = 10 val name = "OBJECT" override def isObject: _root_.scala.Boolean = true - } @SerialVersionUID(0L) - case object PACKAGE extends Kind(11) with Kind.Recognized { + case object PACKAGE extends Kind(11) with Kind.Recognized: val index = 11 val name = "PACKAGE" override def isPackage: _root_.scala.Boolean = true - } @SerialVersionUID(0L) - case object PACKAGE_OBJECT extends Kind(12) with Kind.Recognized { + case object PACKAGE_OBJECT extends Kind(12) with Kind.Recognized: val index = 12 val name = "PACKAGE_OBJECT" override def isPackageObject: _root_.scala.Boolean = true - } @SerialVersionUID(0L) - case object CLASS extends Kind(13) with Kind.Recognized { + case object CLASS extends Kind(13) with Kind.Recognized: val index = 13 val name = "CLASS" override def isClass: _root_.scala.Boolean = true - } @SerialVersionUID(0L) - case object TRAIT extends Kind(14) with Kind.Recognized { + case object TRAIT extends Kind(14) with Kind.Recognized: val index = 14 val name = "TRAIT" override def isTrait: _root_.scala.Boolean = true - } @SerialVersionUID(0L) - case object INTERFACE extends Kind(18) with Kind.Recognized { + case object INTERFACE extends Kind(18) with Kind.Recognized: val index = 15 val name = "INTERFACE" override def isInterface: _root_.scala.Boolean = true - } @SerialVersionUID(0L) final case class Unrecognized(unrecognizedValue: _root_.scala.Int) extends Kind(unrecognizedValue) with SemanticdbUnrecognizedEnum lazy val values = scala.collection.immutable.Seq(UNKNOWN_KIND, LOCAL, FIELD, METHOD, CONSTRUCTOR, MACRO, TYPE, PARAMETER, SELF_PARAMETER, TYPE_PARAMETER, OBJECT, PACKAGE, PACKAGE_OBJECT, CLASS, TRAIT, INTERFACE) - def fromValue(__value: _root_.scala.Int): Kind = __value match { + def fromValue(__value: _root_.scala.Int): Kind = __value match case 0 => UNKNOWN_KIND case 3 => METHOD case 6 => MACRO @@ -417,11 +378,9 @@ object 
SymbolInformation extends SemanticdbGeneratedMessageCompanion[dotty.tool case 20 => FIELD case 21 => CONSTRUCTOR case __other => Unrecognized(__other) - } - } - sealed abstract class Property(val value: _root_.scala.Int) extends SemanticdbGeneratedEnum derives CanEqual { + sealed abstract class Property(val value: _root_.scala.Int) extends SemanticdbGeneratedEnum derives CanEqual: type EnumType = Property def isUnknownProperty: _root_.scala.Boolean = false def isAbstract: _root_.scala.Boolean = false @@ -446,163 +405,141 @@ object SymbolInformation extends SemanticdbGeneratedMessageCompanion[dotty.tool def isOpaque: _root_.scala.Boolean = false final def asRecognized: _root_.scala.Option[dotty.tools.dotc.semanticdb.SymbolInformation.Property.Recognized] = if (isUnrecognized) _root_.scala.None else _root_.scala.Some(this.asInstanceOf[dotty.tools.dotc.semanticdb.SymbolInformation.Property.Recognized]) - } - object Property { + object Property: sealed trait Recognized extends Property @SerialVersionUID(0L) - case object UNKNOWN_PROPERTY extends Property(0) with Property.Recognized { + case object UNKNOWN_PROPERTY extends Property(0) with Property.Recognized: val index = 0 val name = "UNKNOWN_PROPERTY" override def isUnknownProperty: _root_.scala.Boolean = true - } @SerialVersionUID(0L) - case object ABSTRACT extends Property(4) with Property.Recognized { + case object ABSTRACT extends Property(4) with Property.Recognized: val index = 1 val name = "ABSTRACT" override def isAbstract: _root_.scala.Boolean = true - } @SerialVersionUID(0L) - case object FINAL extends Property(8) with Property.Recognized { + case object FINAL extends Property(8) with Property.Recognized: val index = 2 val name = "FINAL" override def isFinal: _root_.scala.Boolean = true - } @SerialVersionUID(0L) - case object SEALED extends Property(16) with Property.Recognized { + case object SEALED extends Property(16) with Property.Recognized: val index = 3 val name = "SEALED" override def isSealed: 
_root_.scala.Boolean = true - } @SerialVersionUID(0L) - case object IMPLICIT extends Property(32) with Property.Recognized { + case object IMPLICIT extends Property(32) with Property.Recognized: val index = 4 val name = "IMPLICIT" override def isImplicit: _root_.scala.Boolean = true - } @SerialVersionUID(0L) - case object LAZY extends Property(64) with Property.Recognized { + case object LAZY extends Property(64) with Property.Recognized: val index = 5 val name = "LAZY" override def isLazy: _root_.scala.Boolean = true - } @SerialVersionUID(0L) - case object CASE extends Property(128) with Property.Recognized { + case object CASE extends Property(128) with Property.Recognized: val index = 6 val name = "CASE" override def isCase: _root_.scala.Boolean = true - } @SerialVersionUID(0L) - case object COVARIANT extends Property(256) with Property.Recognized { + case object COVARIANT extends Property(256) with Property.Recognized: val index = 7 val name = "COVARIANT" override def isCovariant: _root_.scala.Boolean = true - } @SerialVersionUID(0L) - case object CONTRAVARIANT extends Property(512) with Property.Recognized { + case object CONTRAVARIANT extends Property(512) with Property.Recognized: val index = 8 val name = "CONTRAVARIANT" override def isContravariant: _root_.scala.Boolean = true - } @SerialVersionUID(0L) - case object VAL extends Property(1024) with Property.Recognized { + case object VAL extends Property(1024) with Property.Recognized: val index = 9 val name = "VAL" override def isVal: _root_.scala.Boolean = true - } @SerialVersionUID(0L) - case object VAR extends Property(2048) with Property.Recognized { + case object VAR extends Property(2048) with Property.Recognized: val index = 10 val name = "VAR" override def isVar: _root_.scala.Boolean = true - } @SerialVersionUID(0L) - case object STATIC extends Property(4096) with Property.Recognized { + case object STATIC extends Property(4096) with Property.Recognized: val index = 11 val name = "STATIC" override 
def isStatic: _root_.scala.Boolean = true - } @SerialVersionUID(0L) - case object PRIMARY extends Property(8192) with Property.Recognized { + case object PRIMARY extends Property(8192) with Property.Recognized: val index = 12 val name = "PRIMARY" override def isPrimary: _root_.scala.Boolean = true - } @SerialVersionUID(0L) - case object ENUM extends Property(16384) with Property.Recognized { + case object ENUM extends Property(16384) with Property.Recognized: val index = 13 val name = "ENUM" override def isEnum: _root_.scala.Boolean = true - } @SerialVersionUID(0L) - case object DEFAULT extends Property(32768) with Property.Recognized { + case object DEFAULT extends Property(32768) with Property.Recognized: val index = 14 val name = "DEFAULT" override def isDefault: _root_.scala.Boolean = true - } @SerialVersionUID(0L) - case object GIVEN extends Property(65536) with Property.Recognized { + case object GIVEN extends Property(65536) with Property.Recognized: val index = 15 val name = "GIVEN" override def isGiven: _root_.scala.Boolean = true - } @SerialVersionUID(0L) - case object INLINE extends Property(131072) with Property.Recognized { + case object INLINE extends Property(131072) with Property.Recognized: val index = 16 val name = "INLINE" override def isInline: _root_.scala.Boolean = true - } @SerialVersionUID(0L) - case object OPEN extends Property(262144) with Property.Recognized { + case object OPEN extends Property(262144) with Property.Recognized: val index = 17 val name = "OPEN" override def isOpen: _root_.scala.Boolean = true - } @SerialVersionUID(0L) - case object TRANSPARENT extends Property(524288) with Property.Recognized { + case object TRANSPARENT extends Property(524288) with Property.Recognized: val index = 18 val name = "TRANSPARENT" override def isTransparent: _root_.scala.Boolean = true - } @SerialVersionUID(0L) - case object INFIX extends Property(1048576) with Property.Recognized { + case object INFIX extends Property(1048576) with 
Property.Recognized: val index = 19 val name = "INFIX" override def isInfix: _root_.scala.Boolean = true - } @SerialVersionUID(0L) - case object OPAQUE extends Property(2097152) with Property.Recognized { + case object OPAQUE extends Property(2097152) with Property.Recognized: val index = 20 val name = "OPAQUE" override def isOpaque: _root_.scala.Boolean = true - } @SerialVersionUID(0L) final case class Unrecognized(unrecognizedValue: _root_.scala.Int) extends Property(unrecognizedValue) with SemanticdbUnrecognizedEnum lazy val values = scala.collection.immutable.Seq(UNKNOWN_PROPERTY, ABSTRACT, FINAL, SEALED, IMPLICIT, LAZY, CASE, COVARIANT, CONTRAVARIANT, VAL, VAR, STATIC, PRIMARY, ENUM, DEFAULT, GIVEN, INLINE, OPEN, TRANSPARENT, INFIX, OPAQUE) - def fromValue(__value: _root_.scala.Int): Property = __value match { + def fromValue(__value: _root_.scala.Int): Property = __value match case 0 => UNKNOWN_PROPERTY case 4 => ABSTRACT case 8 => FINAL @@ -625,10 +562,8 @@ object SymbolInformation extends SemanticdbGeneratedMessageCompanion[dotty.tool case 1048576 => INFIX case 2097152 => OPAQUE case __other => Unrecognized(__other) - } - } final val SYMBOL_FIELD_NUMBER = 1 final val LANGUAGE_FIELD_NUMBER = 16 final val KIND_FIELD_NUMBER = 3 @@ -667,4 +602,3 @@ object SymbolInformation extends SemanticdbGeneratedMessageCompanion[dotty.tool documentation ) // @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.SymbolInformation]) -} diff --git a/compiler/src/dotty/tools/dotc/semanticdb/generated/SymbolOccurrence.scala b/compiler/src/dotty/tools/dotc/semanticdb/generated/SymbolOccurrence.scala index 5d7670dfdd32..7109aa7aae06 100644 --- a/compiler/src/dotty/tools/dotc/semanticdb/generated/SymbolOccurrence.scala +++ b/compiler/src/dotty/tools/dotc/semanticdb/generated/SymbolOccurrence.scala @@ -13,10 +13,10 @@ final case class SymbolOccurrence( range: _root_.scala.Option[dotty.tools.dotc.semanticdb.Range] = _root_.scala.None, symbol: 
_root_.scala.Predef.String = "", role: dotty.tools.dotc.semanticdb.SymbolOccurrence.Role = dotty.tools.dotc.semanticdb.SymbolOccurrence.Role.UNKNOWN_ROLE - ) extends SemanticdbGeneratedMessage derives CanEqual { + ) extends SemanticdbGeneratedMessage derives CanEqual: @transient @sharable private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private[this] def __computeSerializedSize(): _root_.scala.Int = var __size = 0 if (range.isDefined) { val __value = range.get @@ -25,29 +25,24 @@ final case class SymbolOccurrence( { val __value = symbol - if (!__value.isEmpty) { + if (!__value.isEmpty) __size += SemanticdbOutputStream.computeStringSize(2, __value) - } }; { val __value = role.value - if (__value != 0) { + if (__value != 0) __size += SemanticdbOutputStream.computeEnumSize(3, __value) - } }; __size - } - override def serializedSize: _root_.scala.Int = { + override def serializedSize: _root_.scala.Int = var __size = __serializedSizeMemoized - if (__size == 0) { + if (__size == 0) __size = __computeSerializedSize() + 1 __serializedSizeMemoized = __size - } __size - 1 - } - def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { + def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = range.foreach { __v => val __m = __v _output__.writeTag(1, 2) @@ -56,17 +51,14 @@ final case class SymbolOccurrence( }; { val __v = symbol - if (!__v.isEmpty) { + if (!__v.isEmpty) _output__.writeString(2, __v) - } }; { val __v = role.value - if (__v != 0) { + if (__v != 0) _output__.writeEnum(3, __v) - } }; - } def getRange: dotty.tools.dotc.semanticdb.Range = range.getOrElse(dotty.tools.dotc.semanticdb.Range.defaultInstance) def clearRange: SymbolOccurrence = copy(range = _root_.scala.None) def withRange(__v: dotty.tools.dotc.semanticdb.Range): SymbolOccurrence = copy(range = Option(__v)) @@ -77,18 +69,17 @@ final case class SymbolOccurrence( // 
@@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.SymbolOccurrence]) -} -object SymbolOccurrence extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.SymbolOccurrence] { +object SymbolOccurrence extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.SymbolOccurrence]: implicit def messageCompanion: SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.SymbolOccurrence] = this - def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.SymbolOccurrence = { + def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.SymbolOccurrence = var __range: _root_.scala.Option[dotty.tools.dotc.semanticdb.Range] = _root_.scala.None var __symbol: _root_.scala.Predef.String = "" var __role: dotty.tools.dotc.semanticdb.SymbolOccurrence.Role = dotty.tools.dotc.semanticdb.SymbolOccurrence.Role.UNKNOWN_ROLE var _done__ = false - while (!_done__) { + while (!_done__) val _tag__ = _input__.readTag() - _tag__ match { + _tag__ match case 0 => _done__ = true case 10 => __range = Option(__range.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.Range](_input__))(LiteParser.readMessage(_input__, _))) @@ -97,14 +88,11 @@ object SymbolOccurrence extends SemanticdbGeneratedMessageCompanion[dotty.tools case 24 => __role = dotty.tools.dotc.semanticdb.SymbolOccurrence.Role.fromValue(_input__.readEnum()) case tag => _input__.skipField(tag) - } - } dotty.tools.dotc.semanticdb.SymbolOccurrence( range = __range, symbol = __symbol, role = __role ) - } @@ -116,52 +104,46 @@ object SymbolOccurrence extends SemanticdbGeneratedMessageCompanion[dotty.tools symbol = "", role = dotty.tools.dotc.semanticdb.SymbolOccurrence.Role.UNKNOWN_ROLE ) - sealed abstract class Role(val value: _root_.scala.Int) extends SemanticdbGeneratedEnum derives CanEqual { + sealed abstract class Role(val value: _root_.scala.Int) extends SemanticdbGeneratedEnum derives CanEqual: type EnumType = Role def isUnknownRole: 
_root_.scala.Boolean = false def isReference: _root_.scala.Boolean = false def isDefinition: _root_.scala.Boolean = false final def asRecognized: _root_.scala.Option[dotty.tools.dotc.semanticdb.SymbolOccurrence.Role.Recognized] = if (isUnrecognized) _root_.scala.None else _root_.scala.Some(this.asInstanceOf[dotty.tools.dotc.semanticdb.SymbolOccurrence.Role.Recognized]) - } - object Role { + object Role: sealed trait Recognized extends Role @SerialVersionUID(0L) - case object UNKNOWN_ROLE extends Role(0) with Role.Recognized { + case object UNKNOWN_ROLE extends Role(0) with Role.Recognized: val index = 0 val name = "UNKNOWN_ROLE" override def isUnknownRole: _root_.scala.Boolean = true - } @SerialVersionUID(0L) - case object REFERENCE extends Role(1) with Role.Recognized { + case object REFERENCE extends Role(1) with Role.Recognized: val index = 1 val name = "REFERENCE" override def isReference: _root_.scala.Boolean = true - } @SerialVersionUID(0L) - case object DEFINITION extends Role(2) with Role.Recognized { + case object DEFINITION extends Role(2) with Role.Recognized: val index = 2 val name = "DEFINITION" override def isDefinition: _root_.scala.Boolean = true - } @SerialVersionUID(0L) final case class Unrecognized(unrecognizedValue: _root_.scala.Int) extends Role(unrecognizedValue) with SemanticdbUnrecognizedEnum lazy val values = scala.collection.immutable.Seq(UNKNOWN_ROLE, REFERENCE, DEFINITION) - def fromValue(__value: _root_.scala.Int): Role = __value match { + def fromValue(__value: _root_.scala.Int): Role = __value match case 0 => UNKNOWN_ROLE case 1 => REFERENCE case 2 => DEFINITION case __other => Unrecognized(__other) - } - } final val RANGE_FIELD_NUMBER = 1 final val SYMBOL_FIELD_NUMBER = 2 final val ROLE_FIELD_NUMBER = 3 @@ -175,4 +157,3 @@ object SymbolOccurrence extends SemanticdbGeneratedMessageCompanion[dotty.tools role ) // @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.SymbolOccurrence]) -} diff --git 
a/compiler/src/dotty/tools/dotc/semanticdb/generated/Synthetic.scala b/compiler/src/dotty/tools/dotc/semanticdb/generated/Synthetic.scala index 3c6fcfbf4c6a..864826e67ed2 100644 --- a/compiler/src/dotty/tools/dotc/semanticdb/generated/Synthetic.scala +++ b/compiler/src/dotty/tools/dotc/semanticdb/generated/Synthetic.scala @@ -12,10 +12,10 @@ import scala.annotation.internal.sharable final case class Synthetic( range: _root_.scala.Option[dotty.tools.dotc.semanticdb.Range] = _root_.scala.None, tree: dotty.tools.dotc.semanticdb.Tree = dotty.tools.dotc.semanticdb.Synthetic._typemapper_tree.toCustom(dotty.tools.dotc.semanticdb.TreeMessage.defaultInstance) - ) extends SemanticdbGeneratedMessage derives CanEqual { + ) extends SemanticdbGeneratedMessage derives CanEqual: @transient @sharable private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private[this] def __computeSerializedSize(): _root_.scala.Int = var __size = 0 if (range.isDefined) { val __value = range.get @@ -24,22 +24,18 @@ final case class Synthetic( { val __value = dotty.tools.dotc.semanticdb.Synthetic._typemapper_tree.toBase(tree) - if (__value.serializedSize != 0) { + if (__value.serializedSize != 0) __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize - } }; __size - } - override def serializedSize: _root_.scala.Int = { + override def serializedSize: _root_.scala.Int = var __size = __serializedSizeMemoized - if (__size == 0) { + if (__size == 0) __size = __computeSerializedSize() + 1 __serializedSizeMemoized = __size - } __size - 1 - } - def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { + def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = range.foreach { __v => val __m = __v _output__.writeTag(1, 2) @@ -48,13 +44,11 @@ final case class Synthetic( }; { val __v = dotty.tools.dotc.semanticdb.Synthetic._typemapper_tree.toBase(tree) - 
if (__v.serializedSize != 0) { + if (__v.serializedSize != 0) _output__.writeTag(2, 2) _output__.writeUInt32NoTag(__v.serializedSize) __v.writeTo(_output__) - } }; - } def getRange: dotty.tools.dotc.semanticdb.Range = range.getOrElse(dotty.tools.dotc.semanticdb.Range.defaultInstance) def clearRange: Synthetic = copy(range = _root_.scala.None) def withRange(__v: dotty.tools.dotc.semanticdb.Range): Synthetic = copy(range = Option(__v)) @@ -64,30 +58,26 @@ final case class Synthetic( // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.Synthetic]) -} -object Synthetic extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.Synthetic] { +object Synthetic extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.Synthetic]: implicit def messageCompanion: SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.Synthetic] = this - def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.Synthetic = { + def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.Synthetic = var __range: _root_.scala.Option[dotty.tools.dotc.semanticdb.Range] = _root_.scala.None var __tree: _root_.scala.Option[dotty.tools.dotc.semanticdb.TreeMessage] = _root_.scala.None var _done__ = false - while (!_done__) { + while (!_done__) val _tag__ = _input__.readTag() - _tag__ match { + _tag__ match case 0 => _done__ = true case 10 => __range = Option(__range.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.Range](_input__))(LiteParser.readMessage(_input__, _))) case 18 => __tree = _root_.scala.Some(__tree.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.TreeMessage](_input__))(LiteParser.readMessage(_input__, _))) case tag => _input__.skipField(tag) - } - } dotty.tools.dotc.semanticdb.Synthetic( range = __range, tree = dotty.tools.dotc.semanticdb.Synthetic._typemapper_tree.toCustom(__tree.getOrElse(dotty.tools.dotc.semanticdb.TreeMessage.defaultInstance)) ) - } @@ -110,4 +100,3 @@ object 
Synthetic extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.s tree ) // @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.Synthetic]) -} diff --git a/compiler/src/dotty/tools/dotc/semanticdb/generated/TextDocument.scala b/compiler/src/dotty/tools/dotc/semanticdb/generated/TextDocument.scala index f0347e86d9e3..f6401f01ca69 100644 --- a/compiler/src/dotty/tools/dotc/semanticdb/generated/TextDocument.scala +++ b/compiler/src/dotty/tools/dotc/semanticdb/generated/TextDocument.scala @@ -19,45 +19,40 @@ final case class TextDocument( occurrences: _root_.scala.Seq[dotty.tools.dotc.semanticdb.SymbolOccurrence] = _root_.scala.Seq.empty, diagnostics: _root_.scala.Seq[dotty.tools.dotc.semanticdb.Diagnostic] = _root_.scala.Seq.empty, synthetics: _root_.scala.Seq[dotty.tools.dotc.semanticdb.Synthetic] = _root_.scala.Seq.empty - ) extends SemanticdbGeneratedMessage derives CanEqual { + ) extends SemanticdbGeneratedMessage derives CanEqual: @transient @sharable private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private[this] def __computeSerializedSize(): _root_.scala.Int = var __size = 0 { val __value = schema.value - if (__value != 0) { + if (__value != 0) __size += SemanticdbOutputStream.computeEnumSize(1, __value) - } }; { val __value = uri - if (!__value.isEmpty) { + if (!__value.isEmpty) __size += SemanticdbOutputStream.computeStringSize(2, __value) - } }; { val __value = text - if (!__value.isEmpty) { + if (!__value.isEmpty) __size += SemanticdbOutputStream.computeStringSize(3, __value) - } }; { val __value = md5 - if (!__value.isEmpty) { + if (!__value.isEmpty) __size += SemanticdbOutputStream.computeStringSize(11, __value) - } }; { val __value = language.value - if (__value != 0) { + if (__value != 0) __size += SemanticdbOutputStream.computeEnumSize(10, __value) - } }; symbols.foreach { __item => val __value = __item @@ -76,34 +71,28 @@ final case 
class TextDocument( __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize } __size - } - override def serializedSize: _root_.scala.Int = { + override def serializedSize: _root_.scala.Int = var __size = __serializedSizeMemoized - if (__size == 0) { + if (__size == 0) __size = __computeSerializedSize() + 1 __serializedSizeMemoized = __size - } __size - 1 - } - def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { + def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { val __v = schema.value - if (__v != 0) { + if (__v != 0) _output__.writeEnum(1, __v) - } }; { val __v = uri - if (!__v.isEmpty) { + if (!__v.isEmpty) _output__.writeString(2, __v) - } }; { val __v = text - if (!__v.isEmpty) { + if (!__v.isEmpty) _output__.writeString(3, __v) - } }; symbols.foreach { __v => val __m = __v @@ -125,15 +114,13 @@ final case class TextDocument( }; { val __v = language.value - if (__v != 0) { + if (__v != 0) _output__.writeEnum(10, __v) - } }; { val __v = md5 - if (!__v.isEmpty) { + if (!__v.isEmpty) _output__.writeString(11, __v) - } }; synthetics.foreach { __v => val __m = __v @@ -141,7 +128,6 @@ final case class TextDocument( _output__.writeUInt32NoTag(__m.serializedSize) __m.writeTo(_output__) }; - } def withSchema(__v: dotty.tools.dotc.semanticdb.Schema): TextDocument = copy(schema = __v) def withUri(__v: _root_.scala.Predef.String): TextDocument = copy(uri = __v) def withText(__v: _root_.scala.Predef.String): TextDocument = copy(text = __v) @@ -168,11 +154,10 @@ final case class TextDocument( // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.TextDocument]) -} -object TextDocument extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.TextDocument] { +object TextDocument extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.TextDocument]: implicit def messageCompanion: 
SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.TextDocument] = this - def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.TextDocument = { + def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.TextDocument = var __schema: dotty.tools.dotc.semanticdb.Schema = dotty.tools.dotc.semanticdb.Schema.LEGACY var __uri: _root_.scala.Predef.String = "" var __text: _root_.scala.Predef.String = "" @@ -183,9 +168,9 @@ object TextDocument extends SemanticdbGeneratedMessageCompanion[dotty.tools.dot val __diagnostics: _root_.scala.collection.immutable.VectorBuilder[dotty.tools.dotc.semanticdb.Diagnostic] = new _root_.scala.collection.immutable.VectorBuilder[dotty.tools.dotc.semanticdb.Diagnostic] val __synthetics: _root_.scala.collection.immutable.VectorBuilder[dotty.tools.dotc.semanticdb.Synthetic] = new _root_.scala.collection.immutable.VectorBuilder[dotty.tools.dotc.semanticdb.Synthetic] var _done__ = false - while (!_done__) { + while (!_done__) val _tag__ = _input__.readTag() - _tag__ match { + _tag__ match case 0 => _done__ = true case 8 => __schema = dotty.tools.dotc.semanticdb.Schema.fromValue(_input__.readEnum()) @@ -206,8 +191,6 @@ object TextDocument extends SemanticdbGeneratedMessageCompanion[dotty.tools.dot case 98 => __synthetics += LiteParser.readMessage[dotty.tools.dotc.semanticdb.Synthetic](_input__) case tag => _input__.skipField(tag) - } - } dotty.tools.dotc.semanticdb.TextDocument( schema = __schema, uri = __uri, @@ -219,7 +202,6 @@ object TextDocument extends SemanticdbGeneratedMessageCompanion[dotty.tools.dot diagnostics = __diagnostics.result(), synthetics = __synthetics.result() ) - } @@ -268,4 +250,3 @@ object TextDocument extends SemanticdbGeneratedMessageCompanion[dotty.tools.dot synthetics ) // @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.TextDocument]) -} diff --git a/compiler/src/dotty/tools/dotc/semanticdb/generated/TextDocuments.scala 
b/compiler/src/dotty/tools/dotc/semanticdb/generated/TextDocuments.scala index 41b8e1b3f491..3d1dfc9ac25a 100644 --- a/compiler/src/dotty/tools/dotc/semanticdb/generated/TextDocuments.scala +++ b/compiler/src/dotty/tools/dotc/semanticdb/generated/TextDocuments.scala @@ -11,34 +11,30 @@ import scala.annotation.internal.sharable @SerialVersionUID(0L) final case class TextDocuments( documents: _root_.scala.Seq[dotty.tools.dotc.semanticdb.TextDocument] = _root_.scala.Seq.empty - ) extends SemanticdbGeneratedMessage derives CanEqual { + ) extends SemanticdbGeneratedMessage derives CanEqual: @transient @sharable private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private[this] def __computeSerializedSize(): _root_.scala.Int = var __size = 0 documents.foreach { __item => val __value = __item __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize } __size - } - override def serializedSize: _root_.scala.Int = { + override def serializedSize: _root_.scala.Int = var __size = __serializedSizeMemoized - if (__size == 0) { + if (__size == 0) __size = __computeSerializedSize() + 1 __serializedSizeMemoized = __size - } __size - 1 - } - def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { + def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = documents.foreach { __v => val __m = __v _output__.writeTag(1, 2) _output__.writeUInt32NoTag(__m.serializedSize) __m.writeTo(_output__) }; - } def clearDocuments = copy(documents = _root_.scala.Seq.empty) def addDocuments(__vs: dotty.tools.dotc.semanticdb.TextDocument *): TextDocuments = addAllDocuments(__vs) def addAllDocuments(__vs: Iterable[dotty.tools.dotc.semanticdb.TextDocument]): TextDocuments = copy(documents = documents ++ __vs) @@ -48,26 +44,22 @@ final case class TextDocuments( // 
@@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.TextDocuments]) -} -object TextDocuments extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.TextDocuments] { +object TextDocuments extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.TextDocuments]: implicit def messageCompanion: SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.TextDocuments] = this - def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.TextDocuments = { + def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.TextDocuments = val __documents: _root_.scala.collection.immutable.VectorBuilder[dotty.tools.dotc.semanticdb.TextDocument] = new _root_.scala.collection.immutable.VectorBuilder[dotty.tools.dotc.semanticdb.TextDocument] var _done__ = false - while (!_done__) { + while (!_done__) val _tag__ = _input__.readTag() - _tag__ match { + _tag__ match case 0 => _done__ = true case 10 => __documents += LiteParser.readMessage[dotty.tools.dotc.semanticdb.TextDocument](_input__) case tag => _input__.skipField(tag) - } - } dotty.tools.dotc.semanticdb.TextDocuments( documents = __documents.result() ) - } @@ -84,4 +76,3 @@ object TextDocuments extends SemanticdbGeneratedMessageCompanion[dotty.tools.do documents ) // @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.TextDocuments]) -} diff --git a/compiler/src/dotty/tools/dotc/semanticdb/generated/Tree.scala b/compiler/src/dotty/tools/dotc/semanticdb/generated/Tree.scala index ed84d9b2f2d0..a031719ba8e4 100644 --- a/compiler/src/dotty/tools/dotc/semanticdb/generated/Tree.scala +++ b/compiler/src/dotty/tools/dotc/semanticdb/generated/Tree.scala @@ -8,22 +8,21 @@ package dotty.tools.dotc.semanticdb import dotty.tools.dotc.semanticdb.internal._ import scala.annotation.internal.sharable -sealed trait Tree extends SemanticdbGeneratedSealedOneof derives CanEqual { +sealed trait Tree extends 
SemanticdbGeneratedSealedOneof derives CanEqual: type MessageType = dotty.tools.dotc.semanticdb.TreeMessage final def isEmpty = this.isInstanceOf[dotty.tools.dotc.semanticdb.Tree.Empty.type] final def isDefined = !isEmpty final def asMessage: dotty.tools.dotc.semanticdb.TreeMessage = dotty.tools.dotc.semanticdb.Tree.TreeTypeMapper.toBase(this) final def asNonEmpty: Option[dotty.tools.dotc.semanticdb.Tree.NonEmpty] = if (isEmpty) None else Some(this.asInstanceOf[dotty.tools.dotc.semanticdb.Tree.NonEmpty]) -} -object Tree { +object Tree: case object Empty extends dotty.tools.dotc.semanticdb.Tree sealed trait NonEmpty extends dotty.tools.dotc.semanticdb.Tree def defaultInstance: dotty.tools.dotc.semanticdb.Tree = Empty - implicit val TreeTypeMapper: SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.TreeMessage, dotty.tools.dotc.semanticdb.Tree] = new SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.TreeMessage, dotty.tools.dotc.semanticdb.Tree] { - override def toCustom(__base: dotty.tools.dotc.semanticdb.TreeMessage): dotty.tools.dotc.semanticdb.Tree = __base.sealedValue match { + implicit val TreeTypeMapper: SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.TreeMessage, dotty.tools.dotc.semanticdb.Tree] = new SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.TreeMessage, dotty.tools.dotc.semanticdb.Tree]: + override def toCustom(__base: dotty.tools.dotc.semanticdb.TreeMessage): dotty.tools.dotc.semanticdb.Tree = __base.sealedValue match case __v: dotty.tools.dotc.semanticdb.TreeMessage.SealedValue.ApplyTree => __v.value case __v: dotty.tools.dotc.semanticdb.TreeMessage.SealedValue.FunctionTree => __v.value case __v: dotty.tools.dotc.semanticdb.TreeMessage.SealedValue.IdTree => __v.value @@ -33,7 +32,6 @@ object Tree { case __v: dotty.tools.dotc.semanticdb.TreeMessage.SealedValue.SelectTree => __v.value case __v: dotty.tools.dotc.semanticdb.TreeMessage.SealedValue.TypeApplyTree => __v.value case dotty.tools.dotc.semanticdb.TreeMessage.SealedValue.Empty => Empty - } 
override def toBase(__custom: dotty.tools.dotc.semanticdb.Tree): dotty.tools.dotc.semanticdb.TreeMessage = dotty.tools.dotc.semanticdb.TreeMessage(__custom match { case __v: dotty.tools.dotc.semanticdb.ApplyTree => dotty.tools.dotc.semanticdb.TreeMessage.SealedValue.ApplyTree(__v) case __v: dotty.tools.dotc.semanticdb.FunctionTree => dotty.tools.dotc.semanticdb.TreeMessage.SealedValue.FunctionTree(__v) @@ -45,15 +43,13 @@ object Tree { case __v: dotty.tools.dotc.semanticdb.TypeApplyTree => dotty.tools.dotc.semanticdb.TreeMessage.SealedValue.TypeApplyTree(__v) case Empty => dotty.tools.dotc.semanticdb.TreeMessage.SealedValue.Empty }) - } -} @SerialVersionUID(0L) final case class TreeMessage( sealedValue: dotty.tools.dotc.semanticdb.TreeMessage.SealedValue = dotty.tools.dotc.semanticdb.TreeMessage.SealedValue.Empty - ) extends SemanticdbGeneratedMessage derives CanEqual { + ) extends SemanticdbGeneratedMessage derives CanEqual: @transient @sharable private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private[this] def __computeSerializedSize(): _root_.scala.Int = var __size = 0 if (sealedValue.applyTree.isDefined) { val __value = sealedValue.applyTree.get @@ -88,17 +84,14 @@ final case class TreeMessage( __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize }; __size - } - override def serializedSize: _root_.scala.Int = { + override def serializedSize: _root_.scala.Int = var __size = __serializedSizeMemoized - if (__size == 0) { + if (__size == 0) __size = __computeSerializedSize() + 1 __serializedSizeMemoized = __size - } __size - 1 - } - def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { + def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = sealedValue.applyTree.foreach { __v => val __m = __v _output__.writeTag(1, 2) @@ -147,7 +140,6 @@ final case class TreeMessage( 
_output__.writeUInt32NoTag(__m.serializedSize) __m.writeTo(_output__) }; - } def getApplyTree: dotty.tools.dotc.semanticdb.ApplyTree = sealedValue.applyTree.getOrElse(dotty.tools.dotc.semanticdb.ApplyTree.defaultInstance) def withApplyTree(__v: dotty.tools.dotc.semanticdb.ApplyTree): TreeMessage = copy(sealedValue = dotty.tools.dotc.semanticdb.TreeMessage.SealedValue.ApplyTree(__v)) def getFunctionTree: dotty.tools.dotc.semanticdb.FunctionTree = sealedValue.functionTree.getOrElse(dotty.tools.dotc.semanticdb.FunctionTree.defaultInstance) @@ -172,16 +164,15 @@ final case class TreeMessage( def toTree: dotty.tools.dotc.semanticdb.Tree = dotty.tools.dotc.semanticdb.Tree.TreeTypeMapper.toCustom(this) // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.Tree]) -} -object TreeMessage extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.TreeMessage] { +object TreeMessage extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.TreeMessage]: implicit def messageCompanion: SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.TreeMessage] = this - def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.TreeMessage = { + def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.TreeMessage = var __sealedValue: dotty.tools.dotc.semanticdb.TreeMessage.SealedValue = dotty.tools.dotc.semanticdb.TreeMessage.SealedValue.Empty var _done__ = false - while (!_done__) { + while (!_done__) val _tag__ = _input__.readTag() - _tag__ match { + _tag__ match case 0 => _done__ = true case 10 => __sealedValue = dotty.tools.dotc.semanticdb.TreeMessage.SealedValue.ApplyTree(__sealedValue.applyTree.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.ApplyTree](_input__))(LiteParser.readMessage(_input__, _))) @@ -200,12 +191,9 @@ object TreeMessage extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc case 66 => __sealedValue = 
dotty.tools.dotc.semanticdb.TreeMessage.SealedValue.TypeApplyTree(__sealedValue.typeApplyTree.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.TypeApplyTree](_input__))(LiteParser.readMessage(_input__, _))) case tag => _input__.skipField(tag) - } - } dotty.tools.dotc.semanticdb.TreeMessage( sealedValue = __sealedValue ) - } @@ -215,7 +203,7 @@ object TreeMessage extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc lazy val defaultInstance = dotty.tools.dotc.semanticdb.TreeMessage( sealedValue = dotty.tools.dotc.semanticdb.TreeMessage.SealedValue.Empty ) - sealed trait SealedValue extends SemanticdbGeneratedOneof derives CanEqual { + sealed trait SealedValue extends SemanticdbGeneratedOneof derives CanEqual: def isEmpty: _root_.scala.Boolean = false def isDefined: _root_.scala.Boolean = true def isApplyTree: _root_.scala.Boolean = false @@ -234,74 +222,63 @@ object TreeMessage extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc def originalTree: _root_.scala.Option[dotty.tools.dotc.semanticdb.OriginalTree] = _root_.scala.None def selectTree: _root_.scala.Option[dotty.tools.dotc.semanticdb.SelectTree] = _root_.scala.None def typeApplyTree: _root_.scala.Option[dotty.tools.dotc.semanticdb.TypeApplyTree] = _root_.scala.None - } - object SealedValue { + object SealedValue: @SerialVersionUID(0L) - case object Empty extends dotty.tools.dotc.semanticdb.TreeMessage.SealedValue { + case object Empty extends dotty.tools.dotc.semanticdb.TreeMessage.SealedValue: type ValueType = _root_.scala.Nothing override def isEmpty: _root_.scala.Boolean = true override def isDefined: _root_.scala.Boolean = false override def number: _root_.scala.Int = 0 override def value: _root_.scala.Nothing = throw new java.util.NoSuchElementException("Empty.value") - } @SerialVersionUID(0L) - final case class ApplyTree(value: dotty.tools.dotc.semanticdb.ApplyTree) extends dotty.tools.dotc.semanticdb.TreeMessage.SealedValue derives CanEqual { + final case class ApplyTree(value: 
dotty.tools.dotc.semanticdb.ApplyTree) extends dotty.tools.dotc.semanticdb.TreeMessage.SealedValue derives CanEqual: type ValueType = dotty.tools.dotc.semanticdb.ApplyTree override def isApplyTree: _root_.scala.Boolean = true override def applyTree: _root_.scala.Option[dotty.tools.dotc.semanticdb.ApplyTree] = Some(value) override def number: _root_.scala.Int = 1 - } @SerialVersionUID(0L) - final case class FunctionTree(value: dotty.tools.dotc.semanticdb.FunctionTree) extends dotty.tools.dotc.semanticdb.TreeMessage.SealedValue derives CanEqual { + final case class FunctionTree(value: dotty.tools.dotc.semanticdb.FunctionTree) extends dotty.tools.dotc.semanticdb.TreeMessage.SealedValue derives CanEqual: type ValueType = dotty.tools.dotc.semanticdb.FunctionTree override def isFunctionTree: _root_.scala.Boolean = true override def functionTree: _root_.scala.Option[dotty.tools.dotc.semanticdb.FunctionTree] = Some(value) override def number: _root_.scala.Int = 2 - } @SerialVersionUID(0L) - final case class IdTree(value: dotty.tools.dotc.semanticdb.IdTree) extends dotty.tools.dotc.semanticdb.TreeMessage.SealedValue derives CanEqual { + final case class IdTree(value: dotty.tools.dotc.semanticdb.IdTree) extends dotty.tools.dotc.semanticdb.TreeMessage.SealedValue derives CanEqual: type ValueType = dotty.tools.dotc.semanticdb.IdTree override def isIdTree: _root_.scala.Boolean = true override def idTree: _root_.scala.Option[dotty.tools.dotc.semanticdb.IdTree] = Some(value) override def number: _root_.scala.Int = 3 - } @SerialVersionUID(0L) - final case class LiteralTree(value: dotty.tools.dotc.semanticdb.LiteralTree) extends dotty.tools.dotc.semanticdb.TreeMessage.SealedValue derives CanEqual { + final case class LiteralTree(value: dotty.tools.dotc.semanticdb.LiteralTree) extends dotty.tools.dotc.semanticdb.TreeMessage.SealedValue derives CanEqual: type ValueType = dotty.tools.dotc.semanticdb.LiteralTree override def isLiteralTree: _root_.scala.Boolean = true override def 
literalTree: _root_.scala.Option[dotty.tools.dotc.semanticdb.LiteralTree] = Some(value) override def number: _root_.scala.Int = 4 - } @SerialVersionUID(0L) - final case class MacroExpansionTree(value: dotty.tools.dotc.semanticdb.MacroExpansionTree) extends dotty.tools.dotc.semanticdb.TreeMessage.SealedValue derives CanEqual { + final case class MacroExpansionTree(value: dotty.tools.dotc.semanticdb.MacroExpansionTree) extends dotty.tools.dotc.semanticdb.TreeMessage.SealedValue derives CanEqual: type ValueType = dotty.tools.dotc.semanticdb.MacroExpansionTree override def isMacroExpansionTree: _root_.scala.Boolean = true override def macroExpansionTree: _root_.scala.Option[dotty.tools.dotc.semanticdb.MacroExpansionTree] = Some(value) override def number: _root_.scala.Int = 5 - } @SerialVersionUID(0L) - final case class OriginalTree(value: dotty.tools.dotc.semanticdb.OriginalTree) extends dotty.tools.dotc.semanticdb.TreeMessage.SealedValue derives CanEqual { + final case class OriginalTree(value: dotty.tools.dotc.semanticdb.OriginalTree) extends dotty.tools.dotc.semanticdb.TreeMessage.SealedValue derives CanEqual: type ValueType = dotty.tools.dotc.semanticdb.OriginalTree override def isOriginalTree: _root_.scala.Boolean = true override def originalTree: _root_.scala.Option[dotty.tools.dotc.semanticdb.OriginalTree] = Some(value) override def number: _root_.scala.Int = 6 - } @SerialVersionUID(0L) - final case class SelectTree(value: dotty.tools.dotc.semanticdb.SelectTree) extends dotty.tools.dotc.semanticdb.TreeMessage.SealedValue derives CanEqual { + final case class SelectTree(value: dotty.tools.dotc.semanticdb.SelectTree) extends dotty.tools.dotc.semanticdb.TreeMessage.SealedValue derives CanEqual: type ValueType = dotty.tools.dotc.semanticdb.SelectTree override def isSelectTree: _root_.scala.Boolean = true override def selectTree: _root_.scala.Option[dotty.tools.dotc.semanticdb.SelectTree] = Some(value) override def number: _root_.scala.Int = 7 - } 
@SerialVersionUID(0L) - final case class TypeApplyTree(value: dotty.tools.dotc.semanticdb.TypeApplyTree) extends dotty.tools.dotc.semanticdb.TreeMessage.SealedValue derives CanEqual { + final case class TypeApplyTree(value: dotty.tools.dotc.semanticdb.TypeApplyTree) extends dotty.tools.dotc.semanticdb.TreeMessage.SealedValue derives CanEqual: type ValueType = dotty.tools.dotc.semanticdb.TypeApplyTree override def isTypeApplyTree: _root_.scala.Boolean = true override def typeApplyTree: _root_.scala.Option[dotty.tools.dotc.semanticdb.TypeApplyTree] = Some(value) override def number: _root_.scala.Int = 8 - } - } final val APPLY_TREE_FIELD_NUMBER = 1 final val FUNCTION_TREE_FIELD_NUMBER = 2 final val ID_TREE_FIELD_NUMBER = 3 @@ -316,47 +293,41 @@ object TreeMessage extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc sealedValue ) // @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.Tree]) -} @SerialVersionUID(0L) final case class ApplyTree( function: dotty.tools.dotc.semanticdb.Tree = dotty.tools.dotc.semanticdb.ApplyTree._typemapper_function.toCustom(dotty.tools.dotc.semanticdb.TreeMessage.defaultInstance), arguments: _root_.scala.Seq[dotty.tools.dotc.semanticdb.Tree] = _root_.scala.Seq.empty - ) extends dotty.tools.dotc.semanticdb.Tree.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { + ) extends dotty.tools.dotc.semanticdb.Tree.NonEmpty with SemanticdbGeneratedMessage derives CanEqual: @transient @sharable private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private[this] def __computeSerializedSize(): _root_.scala.Int = var __size = 0 { val __value = dotty.tools.dotc.semanticdb.ApplyTree._typemapper_function.toBase(function) - if (__value.serializedSize != 0) { + if (__value.serializedSize != 0) __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize - } }; arguments.foreach { __item 
=> val __value = dotty.tools.dotc.semanticdb.ApplyTree._typemapper_arguments.toBase(__item) __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize } __size - } - override def serializedSize: _root_.scala.Int = { + override def serializedSize: _root_.scala.Int = var __size = __serializedSizeMemoized - if (__size == 0) { + if (__size == 0) __size = __computeSerializedSize() + 1 __serializedSizeMemoized = __size - } __size - 1 - } - def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { + def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { val __v = dotty.tools.dotc.semanticdb.ApplyTree._typemapper_function.toBase(function) - if (__v.serializedSize != 0) { + if (__v.serializedSize != 0) _output__.writeTag(1, 2) _output__.writeUInt32NoTag(__v.serializedSize) __v.writeTo(_output__) - } }; arguments.foreach { __v => val __m = dotty.tools.dotc.semanticdb.ApplyTree._typemapper_arguments.toBase(__v) @@ -364,7 +335,6 @@ final case class ApplyTree( _output__.writeUInt32NoTag(__m.serializedSize) __m.writeTo(_output__) }; - } def withFunction(__v: dotty.tools.dotc.semanticdb.Tree): ApplyTree = copy(function = __v) def clearArguments = copy(arguments = _root_.scala.Seq.empty) def addArguments(__vs: dotty.tools.dotc.semanticdb.Tree *): ApplyTree = addAllArguments(__vs) @@ -375,30 +345,26 @@ final case class ApplyTree( // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.ApplyTree]) -} -object ApplyTree extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.ApplyTree] { +object ApplyTree extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.ApplyTree]: implicit def messageCompanion: SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.ApplyTree] = this - def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.ApplyTree = { + def parseFrom(`_input__`: SemanticdbInputStream): 
dotty.tools.dotc.semanticdb.ApplyTree = var __function: _root_.scala.Option[dotty.tools.dotc.semanticdb.TreeMessage] = _root_.scala.None val __arguments: _root_.scala.collection.immutable.VectorBuilder[dotty.tools.dotc.semanticdb.Tree] = new _root_.scala.collection.immutable.VectorBuilder[dotty.tools.dotc.semanticdb.Tree] var _done__ = false - while (!_done__) { + while (!_done__) val _tag__ = _input__.readTag() - _tag__ match { + _tag__ match case 0 => _done__ = true case 10 => __function = _root_.scala.Some(__function.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.TreeMessage](_input__))(LiteParser.readMessage(_input__, _))) case 18 => __arguments += dotty.tools.dotc.semanticdb.ApplyTree._typemapper_arguments.toCustom(LiteParser.readMessage[dotty.tools.dotc.semanticdb.TreeMessage](_input__)) case tag => _input__.skipField(tag) - } - } dotty.tools.dotc.semanticdb.ApplyTree( function = dotty.tools.dotc.semanticdb.ApplyTree._typemapper_function.toCustom(__function.getOrElse(dotty.tools.dotc.semanticdb.TreeMessage.defaultInstance)), arguments = __arguments.result() ) - } @@ -423,16 +389,15 @@ object ApplyTree extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.s arguments ) // @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.ApplyTree]) -} @SerialVersionUID(0L) final case class FunctionTree( parameters: _root_.scala.Seq[dotty.tools.dotc.semanticdb.IdTree] = _root_.scala.Seq.empty, body: dotty.tools.dotc.semanticdb.Tree = dotty.tools.dotc.semanticdb.FunctionTree._typemapper_body.toCustom(dotty.tools.dotc.semanticdb.TreeMessage.defaultInstance) - ) extends dotty.tools.dotc.semanticdb.Tree.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { + ) extends dotty.tools.dotc.semanticdb.Tree.NonEmpty with SemanticdbGeneratedMessage derives CanEqual: @transient @sharable private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private[this] def 
__computeSerializedSize(): _root_.scala.Int = var __size = 0 parameters.foreach { __item => val __value = __item @@ -441,22 +406,18 @@ final case class FunctionTree( { val __value = dotty.tools.dotc.semanticdb.FunctionTree._typemapper_body.toBase(body) - if (__value.serializedSize != 0) { + if (__value.serializedSize != 0) __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize - } }; __size - } - override def serializedSize: _root_.scala.Int = { + override def serializedSize: _root_.scala.Int = var __size = __serializedSizeMemoized - if (__size == 0) { + if (__size == 0) __size = __computeSerializedSize() + 1 __serializedSizeMemoized = __size - } __size - 1 - } - def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { + def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = parameters.foreach { __v => val __m = __v _output__.writeTag(1, 2) @@ -465,13 +426,11 @@ final case class FunctionTree( }; { val __v = dotty.tools.dotc.semanticdb.FunctionTree._typemapper_body.toBase(body) - if (__v.serializedSize != 0) { + if (__v.serializedSize != 0) _output__.writeTag(2, 2) _output__.writeUInt32NoTag(__v.serializedSize) __v.writeTo(_output__) - } }; - } def clearParameters = copy(parameters = _root_.scala.Seq.empty) def addParameters(__vs: dotty.tools.dotc.semanticdb.IdTree *): FunctionTree = addAllParameters(__vs) def addAllParameters(__vs: Iterable[dotty.tools.dotc.semanticdb.IdTree]): FunctionTree = copy(parameters = parameters ++ __vs) @@ -482,30 +441,26 @@ final case class FunctionTree( // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.FunctionTree]) -} -object FunctionTree extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.FunctionTree] { +object FunctionTree extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.FunctionTree]: implicit def messageCompanion: 
SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.FunctionTree] = this - def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.FunctionTree = { + def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.FunctionTree = val __parameters: _root_.scala.collection.immutable.VectorBuilder[dotty.tools.dotc.semanticdb.IdTree] = new _root_.scala.collection.immutable.VectorBuilder[dotty.tools.dotc.semanticdb.IdTree] var __body: _root_.scala.Option[dotty.tools.dotc.semanticdb.TreeMessage] = _root_.scala.None var _done__ = false - while (!_done__) { + while (!_done__) val _tag__ = _input__.readTag() - _tag__ match { + _tag__ match case 0 => _done__ = true case 10 => __parameters += LiteParser.readMessage[dotty.tools.dotc.semanticdb.IdTree](_input__) case 18 => __body = _root_.scala.Some(__body.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.TreeMessage](_input__))(LiteParser.readMessage(_input__, _))) case tag => _input__.skipField(tag) - } - } dotty.tools.dotc.semanticdb.FunctionTree( parameters = __parameters.result(), body = dotty.tools.dotc.semanticdb.FunctionTree._typemapper_body.toCustom(__body.getOrElse(dotty.tools.dotc.semanticdb.TreeMessage.defaultInstance)) ) - } @@ -528,68 +483,57 @@ object FunctionTree extends SemanticdbGeneratedMessageCompanion[dotty.tools.dot body ) // @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.FunctionTree]) -} @SerialVersionUID(0L) final case class IdTree( symbol: _root_.scala.Predef.String = "" - ) extends dotty.tools.dotc.semanticdb.Tree.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { + ) extends dotty.tools.dotc.semanticdb.Tree.NonEmpty with SemanticdbGeneratedMessage derives CanEqual: @transient @sharable private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private[this] def __computeSerializedSize(): _root_.scala.Int = var __size = 0 { val 
__value = symbol - if (!__value.isEmpty) { + if (!__value.isEmpty) __size += SemanticdbOutputStream.computeStringSize(1, __value) - } }; __size - } - override def serializedSize: _root_.scala.Int = { + override def serializedSize: _root_.scala.Int = var __size = __serializedSizeMemoized - if (__size == 0) { + if (__size == 0) __size = __computeSerializedSize() + 1 __serializedSizeMemoized = __size - } __size - 1 - } - def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { + def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { val __v = symbol - if (!__v.isEmpty) { + if (!__v.isEmpty) _output__.writeString(1, __v) - } }; - } def withSymbol(__v: _root_.scala.Predef.String): IdTree = copy(symbol = __v) // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.IdTree]) -} -object IdTree extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.IdTree] { +object IdTree extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.IdTree]: implicit def messageCompanion: SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.IdTree] = this - def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.IdTree = { + def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.IdTree = var __symbol: _root_.scala.Predef.String = "" var _done__ = false - while (!_done__) { + while (!_done__) val _tag__ = _input__.readTag() - _tag__ match { + _tag__ match case 0 => _done__ = true case 10 => __symbol = _input__.readStringRequireUtf8() case tag => _input__.skipField(tag) - } - } dotty.tools.dotc.semanticdb.IdTree( symbol = __symbol ) - } @@ -606,70 +550,59 @@ object IdTree extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.sema symbol ) // @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.IdTree]) -} @SerialVersionUID(0L) final case class LiteralTree( constant: dotty.tools.dotc.semanticdb.Constant = 
dotty.tools.dotc.semanticdb.LiteralTree._typemapper_constant.toCustom(dotty.tools.dotc.semanticdb.ConstantMessage.defaultInstance) - ) extends dotty.tools.dotc.semanticdb.Tree.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { + ) extends dotty.tools.dotc.semanticdb.Tree.NonEmpty with SemanticdbGeneratedMessage derives CanEqual: @transient @sharable private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private[this] def __computeSerializedSize(): _root_.scala.Int = var __size = 0 { val __value = dotty.tools.dotc.semanticdb.LiteralTree._typemapper_constant.toBase(constant) - if (__value.serializedSize != 0) { + if (__value.serializedSize != 0) __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize - } }; __size - } - override def serializedSize: _root_.scala.Int = { + override def serializedSize: _root_.scala.Int = var __size = __serializedSizeMemoized - if (__size == 0) { + if (__size == 0) __size = __computeSerializedSize() + 1 __serializedSizeMemoized = __size - } __size - 1 - } - def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { + def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { val __v = dotty.tools.dotc.semanticdb.LiteralTree._typemapper_constant.toBase(constant) - if (__v.serializedSize != 0) { + if (__v.serializedSize != 0) _output__.writeTag(1, 2) _output__.writeUInt32NoTag(__v.serializedSize) __v.writeTo(_output__) - } }; - } def withConstant(__v: dotty.tools.dotc.semanticdb.Constant): LiteralTree = copy(constant = __v) // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.LiteralTree]) -} -object LiteralTree extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.LiteralTree] { +object LiteralTree extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.LiteralTree]: implicit def messageCompanion: 
SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.LiteralTree] = this - def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.LiteralTree = { + def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.LiteralTree = var __constant: _root_.scala.Option[dotty.tools.dotc.semanticdb.ConstantMessage] = _root_.scala.None var _done__ = false - while (!_done__) { + while (!_done__) val _tag__ = _input__.readTag() - _tag__ match { + _tag__ match case 0 => _done__ = true case 10 => __constant = _root_.scala.Some(__constant.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.ConstantMessage](_input__))(LiteParser.readMessage(_input__, _))) case tag => _input__.skipField(tag) - } - } dotty.tools.dotc.semanticdb.LiteralTree( constant = dotty.tools.dotc.semanticdb.LiteralTree._typemapper_constant.toCustom(__constant.getOrElse(dotty.tools.dotc.semanticdb.ConstantMessage.defaultInstance)) ) - } @@ -688,60 +621,51 @@ object LiteralTree extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc constant ) // @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.LiteralTree]) -} @SerialVersionUID(0L) final case class MacroExpansionTree( beforeExpansion: dotty.tools.dotc.semanticdb.Tree = dotty.tools.dotc.semanticdb.MacroExpansionTree._typemapper_beforeExpansion.toCustom(dotty.tools.dotc.semanticdb.TreeMessage.defaultInstance), tpe: dotty.tools.dotc.semanticdb.Type = dotty.tools.dotc.semanticdb.MacroExpansionTree._typemapper_tpe.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance) - ) extends dotty.tools.dotc.semanticdb.Tree.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { + ) extends dotty.tools.dotc.semanticdb.Tree.NonEmpty with SemanticdbGeneratedMessage derives CanEqual: @transient @sharable private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private[this] def __computeSerializedSize(): 
_root_.scala.Int = var __size = 0 { val __value = dotty.tools.dotc.semanticdb.MacroExpansionTree._typemapper_beforeExpansion.toBase(beforeExpansion) - if (__value.serializedSize != 0) { + if (__value.serializedSize != 0) __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize - } }; { val __value = dotty.tools.dotc.semanticdb.MacroExpansionTree._typemapper_tpe.toBase(tpe) - if (__value.serializedSize != 0) { + if (__value.serializedSize != 0) __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize - } }; __size - } - override def serializedSize: _root_.scala.Int = { + override def serializedSize: _root_.scala.Int = var __size = __serializedSizeMemoized - if (__size == 0) { + if (__size == 0) __size = __computeSerializedSize() + 1 __serializedSizeMemoized = __size - } __size - 1 - } - def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { + def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { val __v = dotty.tools.dotc.semanticdb.MacroExpansionTree._typemapper_beforeExpansion.toBase(beforeExpansion) - if (__v.serializedSize != 0) { + if (__v.serializedSize != 0) _output__.writeTag(1, 2) _output__.writeUInt32NoTag(__v.serializedSize) __v.writeTo(_output__) - } }; { val __v = dotty.tools.dotc.semanticdb.MacroExpansionTree._typemapper_tpe.toBase(tpe) - if (__v.serializedSize != 0) { + if (__v.serializedSize != 0) _output__.writeTag(2, 2) _output__.writeUInt32NoTag(__v.serializedSize) __v.writeTo(_output__) - } }; - } def withBeforeExpansion(__v: dotty.tools.dotc.semanticdb.Tree): MacroExpansionTree = copy(beforeExpansion = __v) def withTpe(__v: dotty.tools.dotc.semanticdb.Type): MacroExpansionTree = copy(tpe = __v) @@ -749,30 +673,26 @@ final case class MacroExpansionTree( // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.MacroExpansionTree]) -} -object MacroExpansionTree extends 
SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.MacroExpansionTree] { +object MacroExpansionTree extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.MacroExpansionTree]: implicit def messageCompanion: SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.MacroExpansionTree] = this - def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.MacroExpansionTree = { + def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.MacroExpansionTree = var __beforeExpansion: _root_.scala.Option[dotty.tools.dotc.semanticdb.TreeMessage] = _root_.scala.None var __tpe: _root_.scala.Option[dotty.tools.dotc.semanticdb.TypeMessage] = _root_.scala.None var _done__ = false - while (!_done__) { + while (!_done__) val _tag__ = _input__.readTag() - _tag__ match { + _tag__ match case 0 => _done__ = true case 10 => __beforeExpansion = _root_.scala.Some(__beforeExpansion.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.TreeMessage](_input__))(LiteParser.readMessage(_input__, _))) case 18 => __tpe = _root_.scala.Some(__tpe.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.TypeMessage](_input__))(LiteParser.readMessage(_input__, _))) case tag => _input__.skipField(tag) - } - } dotty.tools.dotc.semanticdb.MacroExpansionTree( beforeExpansion = dotty.tools.dotc.semanticdb.MacroExpansionTree._typemapper_beforeExpansion.toCustom(__beforeExpansion.getOrElse(dotty.tools.dotc.semanticdb.TreeMessage.defaultInstance)), tpe = dotty.tools.dotc.semanticdb.MacroExpansionTree._typemapper_tpe.toCustom(__tpe.getOrElse(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance)) ) - } @@ -797,39 +717,34 @@ object MacroExpansionTree extends SemanticdbGeneratedMessageCompanion[dotty.too tpe ) // @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.MacroExpansionTree]) -} @SerialVersionUID(0L) final case class OriginalTree( range: _root_.scala.Option[dotty.tools.dotc.semanticdb.Range] = 
_root_.scala.None - ) extends dotty.tools.dotc.semanticdb.Tree.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { + ) extends dotty.tools.dotc.semanticdb.Tree.NonEmpty with SemanticdbGeneratedMessage derives CanEqual: @transient @sharable private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private[this] def __computeSerializedSize(): _root_.scala.Int = var __size = 0 if (range.isDefined) { val __value = range.get __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize }; __size - } - override def serializedSize: _root_.scala.Int = { + override def serializedSize: _root_.scala.Int = var __size = __serializedSizeMemoized - if (__size == 0) { + if (__size == 0) __size = __computeSerializedSize() + 1 __serializedSizeMemoized = __size - } __size - 1 - } - def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { + def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = range.foreach { __v => val __m = __v _output__.writeTag(1, 2) _output__.writeUInt32NoTag(__m.serializedSize) __m.writeTo(_output__) }; - } def getRange: dotty.tools.dotc.semanticdb.Range = range.getOrElse(dotty.tools.dotc.semanticdb.Range.defaultInstance) def clearRange: OriginalTree = copy(range = _root_.scala.None) def withRange(__v: dotty.tools.dotc.semanticdb.Range): OriginalTree = copy(range = Option(__v)) @@ -838,26 +753,22 @@ final case class OriginalTree( // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.OriginalTree]) -} -object OriginalTree extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.OriginalTree] { +object OriginalTree extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.OriginalTree]: implicit def messageCompanion: SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.OriginalTree] = this - def parseFrom(`_input__`: SemanticdbInputStream): 
dotty.tools.dotc.semanticdb.OriginalTree = { + def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.OriginalTree = var __range: _root_.scala.Option[dotty.tools.dotc.semanticdb.Range] = _root_.scala.None var _done__ = false - while (!_done__) { + while (!_done__) val _tag__ = _input__.readTag() - _tag__ match { + _tag__ match case 0 => _done__ = true case 10 => __range = Option(__range.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.Range](_input__))(LiteParser.readMessage(_input__, _))) case tag => _input__.skipField(tag) - } - } dotty.tools.dotc.semanticdb.OriginalTree( range = __range ) - } @@ -874,47 +785,41 @@ object OriginalTree extends SemanticdbGeneratedMessageCompanion[dotty.tools.dot range ) // @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.OriginalTree]) -} @SerialVersionUID(0L) final case class SelectTree( qualifier: dotty.tools.dotc.semanticdb.Tree = dotty.tools.dotc.semanticdb.SelectTree._typemapper_qualifier.toCustom(dotty.tools.dotc.semanticdb.TreeMessage.defaultInstance), id: _root_.scala.Option[dotty.tools.dotc.semanticdb.IdTree] = _root_.scala.None - ) extends dotty.tools.dotc.semanticdb.Tree.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { + ) extends dotty.tools.dotc.semanticdb.Tree.NonEmpty with SemanticdbGeneratedMessage derives CanEqual: @transient @sharable private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private[this] def __computeSerializedSize(): _root_.scala.Int = var __size = 0 { val __value = dotty.tools.dotc.semanticdb.SelectTree._typemapper_qualifier.toBase(qualifier) - if (__value.serializedSize != 0) { + if (__value.serializedSize != 0) __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize - } }; if (id.isDefined) { val __value = id.get __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) 
+ __value.serializedSize }; __size - } - override def serializedSize: _root_.scala.Int = { + override def serializedSize: _root_.scala.Int = var __size = __serializedSizeMemoized - if (__size == 0) { + if (__size == 0) __size = __computeSerializedSize() + 1 __serializedSizeMemoized = __size - } __size - 1 - } - def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { + def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { val __v = dotty.tools.dotc.semanticdb.SelectTree._typemapper_qualifier.toBase(qualifier) - if (__v.serializedSize != 0) { + if (__v.serializedSize != 0) _output__.writeTag(1, 2) _output__.writeUInt32NoTag(__v.serializedSize) __v.writeTo(_output__) - } }; id.foreach { __v => val __m = __v @@ -922,7 +827,6 @@ final case class SelectTree( _output__.writeUInt32NoTag(__m.serializedSize) __m.writeTo(_output__) }; - } def withQualifier(__v: dotty.tools.dotc.semanticdb.Tree): SelectTree = copy(qualifier = __v) def getId: dotty.tools.dotc.semanticdb.IdTree = id.getOrElse(dotty.tools.dotc.semanticdb.IdTree.defaultInstance) def clearId: SelectTree = copy(id = _root_.scala.None) @@ -932,30 +836,26 @@ final case class SelectTree( // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.SelectTree]) -} -object SelectTree extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.SelectTree] { +object SelectTree extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.SelectTree]: implicit def messageCompanion: SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.SelectTree] = this - def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.SelectTree = { + def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.SelectTree = var __qualifier: _root_.scala.Option[dotty.tools.dotc.semanticdb.TreeMessage] = _root_.scala.None var __id: _root_.scala.Option[dotty.tools.dotc.semanticdb.IdTree] = _root_.scala.None var _done__ = false - while 
(!_done__) { + while (!_done__) val _tag__ = _input__.readTag() - _tag__ match { + _tag__ match case 0 => _done__ = true case 10 => __qualifier = _root_.scala.Some(__qualifier.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.TreeMessage](_input__))(LiteParser.readMessage(_input__, _))) case 18 => __id = Option(__id.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.IdTree](_input__))(LiteParser.readMessage(_input__, _))) case tag => _input__.skipField(tag) - } - } dotty.tools.dotc.semanticdb.SelectTree( qualifier = dotty.tools.dotc.semanticdb.SelectTree._typemapper_qualifier.toCustom(__qualifier.getOrElse(dotty.tools.dotc.semanticdb.TreeMessage.defaultInstance)), id = __id ) - } @@ -978,47 +878,41 @@ object SelectTree extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc. id ) // @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.SelectTree]) -} @SerialVersionUID(0L) final case class TypeApplyTree( function: dotty.tools.dotc.semanticdb.Tree = dotty.tools.dotc.semanticdb.TypeApplyTree._typemapper_function.toCustom(dotty.tools.dotc.semanticdb.TreeMessage.defaultInstance), typeArguments: _root_.scala.Seq[dotty.tools.dotc.semanticdb.Type] = _root_.scala.Seq.empty - ) extends dotty.tools.dotc.semanticdb.Tree.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { + ) extends dotty.tools.dotc.semanticdb.Tree.NonEmpty with SemanticdbGeneratedMessage derives CanEqual: @transient @sharable private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private[this] def __computeSerializedSize(): _root_.scala.Int = var __size = 0 { val __value = dotty.tools.dotc.semanticdb.TypeApplyTree._typemapper_function.toBase(function) - if (__value.serializedSize != 0) { + if (__value.serializedSize != 0) __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize - } }; typeArguments.foreach { __item => val __value 
= dotty.tools.dotc.semanticdb.TypeApplyTree._typemapper_typeArguments.toBase(__item) __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize } __size - } - override def serializedSize: _root_.scala.Int = { + override def serializedSize: _root_.scala.Int = var __size = __serializedSizeMemoized - if (__size == 0) { + if (__size == 0) __size = __computeSerializedSize() + 1 __serializedSizeMemoized = __size - } __size - 1 - } - def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { + def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { val __v = dotty.tools.dotc.semanticdb.TypeApplyTree._typemapper_function.toBase(function) - if (__v.serializedSize != 0) { + if (__v.serializedSize != 0) _output__.writeTag(1, 2) _output__.writeUInt32NoTag(__v.serializedSize) __v.writeTo(_output__) - } }; typeArguments.foreach { __v => val __m = dotty.tools.dotc.semanticdb.TypeApplyTree._typemapper_typeArguments.toBase(__v) @@ -1026,7 +920,6 @@ final case class TypeApplyTree( _output__.writeUInt32NoTag(__m.serializedSize) __m.writeTo(_output__) }; - } def withFunction(__v: dotty.tools.dotc.semanticdb.Tree): TypeApplyTree = copy(function = __v) def clearTypeArguments = copy(typeArguments = _root_.scala.Seq.empty) def addTypeArguments(__vs: dotty.tools.dotc.semanticdb.Type *): TypeApplyTree = addAllTypeArguments(__vs) @@ -1037,30 +930,26 @@ final case class TypeApplyTree( // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.TypeApplyTree]) -} -object TypeApplyTree extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.TypeApplyTree] { +object TypeApplyTree extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.TypeApplyTree]: implicit def messageCompanion: SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.TypeApplyTree] = this - def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.TypeApplyTree = { + def 
parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.TypeApplyTree = var __function: _root_.scala.Option[dotty.tools.dotc.semanticdb.TreeMessage] = _root_.scala.None val __typeArguments: _root_.scala.collection.immutable.VectorBuilder[dotty.tools.dotc.semanticdb.Type] = new _root_.scala.collection.immutable.VectorBuilder[dotty.tools.dotc.semanticdb.Type] var _done__ = false - while (!_done__) { + while (!_done__) val _tag__ = _input__.readTag() - _tag__ match { + _tag__ match case 0 => _done__ = true case 10 => __function = _root_.scala.Some(__function.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.TreeMessage](_input__))(LiteParser.readMessage(_input__, _))) case 18 => __typeArguments += dotty.tools.dotc.semanticdb.TypeApplyTree._typemapper_typeArguments.toCustom(LiteParser.readMessage[dotty.tools.dotc.semanticdb.TypeMessage](_input__)) case tag => _input__.skipField(tag) - } - } dotty.tools.dotc.semanticdb.TypeApplyTree( function = dotty.tools.dotc.semanticdb.TypeApplyTree._typemapper_function.toCustom(__function.getOrElse(dotty.tools.dotc.semanticdb.TreeMessage.defaultInstance)), typeArguments = __typeArguments.result() ) - } @@ -1085,4 +974,3 @@ object TypeApplyTree extends SemanticdbGeneratedMessageCompanion[dotty.tools.do typeArguments ) // @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.TypeApplyTree]) -} diff --git a/compiler/src/dotty/tools/dotc/semanticdb/generated/Type.scala b/compiler/src/dotty/tools/dotc/semanticdb/generated/Type.scala index be9cc6034f2c..b74407ab8b4a 100644 --- a/compiler/src/dotty/tools/dotc/semanticdb/generated/Type.scala +++ b/compiler/src/dotty/tools/dotc/semanticdb/generated/Type.scala @@ -8,22 +8,21 @@ package dotty.tools.dotc.semanticdb import dotty.tools.dotc.semanticdb.internal._ import scala.annotation.internal.sharable -sealed trait Type extends SemanticdbGeneratedSealedOneof derives CanEqual { +sealed trait Type extends SemanticdbGeneratedSealedOneof derives 
CanEqual: type MessageType = dotty.tools.dotc.semanticdb.TypeMessage final def isEmpty = this.isInstanceOf[dotty.tools.dotc.semanticdb.Type.Empty.type] final def isDefined = !isEmpty final def asMessage: dotty.tools.dotc.semanticdb.TypeMessage = dotty.tools.dotc.semanticdb.Type.TypeTypeMapper.toBase(this) final def asNonEmpty: Option[dotty.tools.dotc.semanticdb.Type.NonEmpty] = if (isEmpty) None else Some(this.asInstanceOf[dotty.tools.dotc.semanticdb.Type.NonEmpty]) -} -object Type { +object Type: case object Empty extends dotty.tools.dotc.semanticdb.Type sealed trait NonEmpty extends dotty.tools.dotc.semanticdb.Type def defaultInstance: dotty.tools.dotc.semanticdb.Type = Empty - implicit val TypeTypeMapper: SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.TypeMessage, dotty.tools.dotc.semanticdb.Type] = new SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.TypeMessage, dotty.tools.dotc.semanticdb.Type] { - override def toCustom(__base: dotty.tools.dotc.semanticdb.TypeMessage): dotty.tools.dotc.semanticdb.Type = __base.sealedValue match { + implicit val TypeTypeMapper: SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.TypeMessage, dotty.tools.dotc.semanticdb.Type] = new SemanticdbTypeMapper[dotty.tools.dotc.semanticdb.TypeMessage, dotty.tools.dotc.semanticdb.Type]: + override def toCustom(__base: dotty.tools.dotc.semanticdb.TypeMessage): dotty.tools.dotc.semanticdb.Type = __base.sealedValue match case __v: dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.TypeRef => __v.value case __v: dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.SingleType => __v.value case __v: dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.ThisType => __v.value @@ -41,7 +40,6 @@ object Type { case __v: dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.MatchType => __v.value case __v: dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.LambdaType => __v.value case dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.Empty => Empty - } override def toBase(__custom: 
dotty.tools.dotc.semanticdb.Type): dotty.tools.dotc.semanticdb.TypeMessage = dotty.tools.dotc.semanticdb.TypeMessage(__custom match { case __v: dotty.tools.dotc.semanticdb.TypeRef => dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.TypeRef(__v) case __v: dotty.tools.dotc.semanticdb.SingleType => dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.SingleType(__v) @@ -61,15 +59,13 @@ object Type { case __v: dotty.tools.dotc.semanticdb.LambdaType => dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.LambdaType(__v) case Empty => dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.Empty }) - } -} @SerialVersionUID(0L) final case class TypeMessage( sealedValue: dotty.tools.dotc.semanticdb.TypeMessage.SealedValue = dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.Empty - ) extends SemanticdbGeneratedMessage derives CanEqual { + ) extends SemanticdbGeneratedMessage derives CanEqual: @transient @sharable private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private[this] def __computeSerializedSize(): _root_.scala.Int = var __size = 0 if (sealedValue.typeRef.isDefined) { val __value = sealedValue.typeRef.get @@ -136,17 +132,14 @@ final case class TypeMessage( __size += 2 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize }; __size - } - override def serializedSize: _root_.scala.Int = { + override def serializedSize: _root_.scala.Int = var __size = __serializedSizeMemoized - if (__size == 0) { + if (__size == 0) __size = __computeSerializedSize() + 1 __serializedSizeMemoized = __size - } __size - 1 - } - def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { + def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = sealedValue.typeRef.foreach { __v => val __m = __v _output__.writeTag(2, 2) @@ -243,7 +236,6 @@ final case class TypeMessage( _output__.writeUInt32NoTag(__m.serializedSize) __m.writeTo(_output__) }; - 
} def getTypeRef: dotty.tools.dotc.semanticdb.TypeRef = sealedValue.typeRef.getOrElse(dotty.tools.dotc.semanticdb.TypeRef.defaultInstance) def withTypeRef(__v: dotty.tools.dotc.semanticdb.TypeRef): TypeMessage = copy(sealedValue = dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.TypeRef(__v)) def getSingleType: dotty.tools.dotc.semanticdb.SingleType = sealedValue.singleType.getOrElse(dotty.tools.dotc.semanticdb.SingleType.defaultInstance) @@ -284,16 +276,15 @@ final case class TypeMessage( def toType: dotty.tools.dotc.semanticdb.Type = dotty.tools.dotc.semanticdb.Type.TypeTypeMapper.toCustom(this) // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.Type]) -} -object TypeMessage extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.TypeMessage] { +object TypeMessage extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.TypeMessage]: implicit def messageCompanion: SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.TypeMessage] = this - def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.TypeMessage = { + def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.TypeMessage = var __sealedValue: dotty.tools.dotc.semanticdb.TypeMessage.SealedValue = dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.Empty var _done__ = false - while (!_done__) { + while (!_done__) val _tag__ = _input__.readTag() - _tag__ match { + _tag__ match case 0 => _done__ = true case 18 => __sealedValue = dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.TypeRef(__sealedValue.typeRef.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.TypeRef](_input__))(LiteParser.readMessage(_input__, _))) @@ -328,12 +319,9 @@ object TypeMessage extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc case 210 => __sealedValue = 
dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.LambdaType(__sealedValue.lambdaType.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.LambdaType](_input__))(LiteParser.readMessage(_input__, _))) case tag => _input__.skipField(tag) - } - } dotty.tools.dotc.semanticdb.TypeMessage( sealedValue = __sealedValue ) - } @@ -343,7 +331,7 @@ object TypeMessage extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc lazy val defaultInstance = dotty.tools.dotc.semanticdb.TypeMessage( sealedValue = dotty.tools.dotc.semanticdb.TypeMessage.SealedValue.Empty ) - sealed trait SealedValue extends SemanticdbGeneratedOneof derives CanEqual { + sealed trait SealedValue extends SemanticdbGeneratedOneof derives CanEqual: def isEmpty: _root_.scala.Boolean = false def isDefined: _root_.scala.Boolean = true def isTypeRef: _root_.scala.Boolean = false @@ -378,130 +366,111 @@ object TypeMessage extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc def repeatedType: _root_.scala.Option[dotty.tools.dotc.semanticdb.RepeatedType] = _root_.scala.None def matchType: _root_.scala.Option[dotty.tools.dotc.semanticdb.MatchType] = _root_.scala.None def lambdaType: _root_.scala.Option[dotty.tools.dotc.semanticdb.LambdaType] = _root_.scala.None - } - object SealedValue { + object SealedValue: @SerialVersionUID(0L) - case object Empty extends dotty.tools.dotc.semanticdb.TypeMessage.SealedValue { + case object Empty extends dotty.tools.dotc.semanticdb.TypeMessage.SealedValue: type ValueType = _root_.scala.Nothing override def isEmpty: _root_.scala.Boolean = true override def isDefined: _root_.scala.Boolean = false override def number: _root_.scala.Int = 0 override def value: _root_.scala.Nothing = throw new java.util.NoSuchElementException("Empty.value") - } @SerialVersionUID(0L) - final case class TypeRef(value: dotty.tools.dotc.semanticdb.TypeRef) extends dotty.tools.dotc.semanticdb.TypeMessage.SealedValue derives CanEqual { + final case class TypeRef(value: 
dotty.tools.dotc.semanticdb.TypeRef) extends dotty.tools.dotc.semanticdb.TypeMessage.SealedValue derives CanEqual: type ValueType = dotty.tools.dotc.semanticdb.TypeRef override def isTypeRef: _root_.scala.Boolean = true override def typeRef: _root_.scala.Option[dotty.tools.dotc.semanticdb.TypeRef] = Some(value) override def number: _root_.scala.Int = 2 - } @SerialVersionUID(0L) - final case class SingleType(value: dotty.tools.dotc.semanticdb.SingleType) extends dotty.tools.dotc.semanticdb.TypeMessage.SealedValue derives CanEqual { + final case class SingleType(value: dotty.tools.dotc.semanticdb.SingleType) extends dotty.tools.dotc.semanticdb.TypeMessage.SealedValue derives CanEqual: type ValueType = dotty.tools.dotc.semanticdb.SingleType override def isSingleType: _root_.scala.Boolean = true override def singleType: _root_.scala.Option[dotty.tools.dotc.semanticdb.SingleType] = Some(value) override def number: _root_.scala.Int = 20 - } @SerialVersionUID(0L) - final case class ThisType(value: dotty.tools.dotc.semanticdb.ThisType) extends dotty.tools.dotc.semanticdb.TypeMessage.SealedValue derives CanEqual { + final case class ThisType(value: dotty.tools.dotc.semanticdb.ThisType) extends dotty.tools.dotc.semanticdb.TypeMessage.SealedValue derives CanEqual: type ValueType = dotty.tools.dotc.semanticdb.ThisType override def isThisType: _root_.scala.Boolean = true override def thisType: _root_.scala.Option[dotty.tools.dotc.semanticdb.ThisType] = Some(value) override def number: _root_.scala.Int = 21 - } @SerialVersionUID(0L) - final case class SuperType(value: dotty.tools.dotc.semanticdb.SuperType) extends dotty.tools.dotc.semanticdb.TypeMessage.SealedValue derives CanEqual { + final case class SuperType(value: dotty.tools.dotc.semanticdb.SuperType) extends dotty.tools.dotc.semanticdb.TypeMessage.SealedValue derives CanEqual: type ValueType = dotty.tools.dotc.semanticdb.SuperType override def isSuperType: _root_.scala.Boolean = true override def superType: 
_root_.scala.Option[dotty.tools.dotc.semanticdb.SuperType] = Some(value) override def number: _root_.scala.Int = 22 - } @SerialVersionUID(0L) - final case class ConstantType(value: dotty.tools.dotc.semanticdb.ConstantType) extends dotty.tools.dotc.semanticdb.TypeMessage.SealedValue derives CanEqual { + final case class ConstantType(value: dotty.tools.dotc.semanticdb.ConstantType) extends dotty.tools.dotc.semanticdb.TypeMessage.SealedValue derives CanEqual: type ValueType = dotty.tools.dotc.semanticdb.ConstantType override def isConstantType: _root_.scala.Boolean = true override def constantType: _root_.scala.Option[dotty.tools.dotc.semanticdb.ConstantType] = Some(value) override def number: _root_.scala.Int = 23 - } @SerialVersionUID(0L) - final case class IntersectionType(value: dotty.tools.dotc.semanticdb.IntersectionType) extends dotty.tools.dotc.semanticdb.TypeMessage.SealedValue derives CanEqual { + final case class IntersectionType(value: dotty.tools.dotc.semanticdb.IntersectionType) extends dotty.tools.dotc.semanticdb.TypeMessage.SealedValue derives CanEqual: type ValueType = dotty.tools.dotc.semanticdb.IntersectionType override def isIntersectionType: _root_.scala.Boolean = true override def intersectionType: _root_.scala.Option[dotty.tools.dotc.semanticdb.IntersectionType] = Some(value) override def number: _root_.scala.Int = 17 - } @SerialVersionUID(0L) - final case class UnionType(value: dotty.tools.dotc.semanticdb.UnionType) extends dotty.tools.dotc.semanticdb.TypeMessage.SealedValue derives CanEqual { + final case class UnionType(value: dotty.tools.dotc.semanticdb.UnionType) extends dotty.tools.dotc.semanticdb.TypeMessage.SealedValue derives CanEqual: type ValueType = dotty.tools.dotc.semanticdb.UnionType override def isUnionType: _root_.scala.Boolean = true override def unionType: _root_.scala.Option[dotty.tools.dotc.semanticdb.UnionType] = Some(value) override def number: _root_.scala.Int = 18 - } @SerialVersionUID(0L) - final case class 
WithType(value: dotty.tools.dotc.semanticdb.WithType) extends dotty.tools.dotc.semanticdb.TypeMessage.SealedValue derives CanEqual { + final case class WithType(value: dotty.tools.dotc.semanticdb.WithType) extends dotty.tools.dotc.semanticdb.TypeMessage.SealedValue derives CanEqual: type ValueType = dotty.tools.dotc.semanticdb.WithType override def isWithType: _root_.scala.Boolean = true override def withType: _root_.scala.Option[dotty.tools.dotc.semanticdb.WithType] = Some(value) override def number: _root_.scala.Int = 19 - } @SerialVersionUID(0L) - final case class StructuralType(value: dotty.tools.dotc.semanticdb.StructuralType) extends dotty.tools.dotc.semanticdb.TypeMessage.SealedValue derives CanEqual { + final case class StructuralType(value: dotty.tools.dotc.semanticdb.StructuralType) extends dotty.tools.dotc.semanticdb.TypeMessage.SealedValue derives CanEqual: type ValueType = dotty.tools.dotc.semanticdb.StructuralType override def isStructuralType: _root_.scala.Boolean = true override def structuralType: _root_.scala.Option[dotty.tools.dotc.semanticdb.StructuralType] = Some(value) override def number: _root_.scala.Int = 7 - } @SerialVersionUID(0L) - final case class AnnotatedType(value: dotty.tools.dotc.semanticdb.AnnotatedType) extends dotty.tools.dotc.semanticdb.TypeMessage.SealedValue derives CanEqual { + final case class AnnotatedType(value: dotty.tools.dotc.semanticdb.AnnotatedType) extends dotty.tools.dotc.semanticdb.TypeMessage.SealedValue derives CanEqual: type ValueType = dotty.tools.dotc.semanticdb.AnnotatedType override def isAnnotatedType: _root_.scala.Boolean = true override def annotatedType: _root_.scala.Option[dotty.tools.dotc.semanticdb.AnnotatedType] = Some(value) override def number: _root_.scala.Int = 8 - } @SerialVersionUID(0L) - final case class ExistentialType(value: dotty.tools.dotc.semanticdb.ExistentialType) extends dotty.tools.dotc.semanticdb.TypeMessage.SealedValue derives CanEqual { + final case class ExistentialType(value: 
dotty.tools.dotc.semanticdb.ExistentialType) extends dotty.tools.dotc.semanticdb.TypeMessage.SealedValue derives CanEqual: type ValueType = dotty.tools.dotc.semanticdb.ExistentialType override def isExistentialType: _root_.scala.Boolean = true override def existentialType: _root_.scala.Option[dotty.tools.dotc.semanticdb.ExistentialType] = Some(value) override def number: _root_.scala.Int = 9 - } @SerialVersionUID(0L) - final case class UniversalType(value: dotty.tools.dotc.semanticdb.UniversalType) extends dotty.tools.dotc.semanticdb.TypeMessage.SealedValue derives CanEqual { + final case class UniversalType(value: dotty.tools.dotc.semanticdb.UniversalType) extends dotty.tools.dotc.semanticdb.TypeMessage.SealedValue derives CanEqual: type ValueType = dotty.tools.dotc.semanticdb.UniversalType override def isUniversalType: _root_.scala.Boolean = true override def universalType: _root_.scala.Option[dotty.tools.dotc.semanticdb.UniversalType] = Some(value) override def number: _root_.scala.Int = 10 - } @SerialVersionUID(0L) - final case class ByNameType(value: dotty.tools.dotc.semanticdb.ByNameType) extends dotty.tools.dotc.semanticdb.TypeMessage.SealedValue derives CanEqual { + final case class ByNameType(value: dotty.tools.dotc.semanticdb.ByNameType) extends dotty.tools.dotc.semanticdb.TypeMessage.SealedValue derives CanEqual: type ValueType = dotty.tools.dotc.semanticdb.ByNameType override def isByNameType: _root_.scala.Boolean = true override def byNameType: _root_.scala.Option[dotty.tools.dotc.semanticdb.ByNameType] = Some(value) override def number: _root_.scala.Int = 13 - } @SerialVersionUID(0L) - final case class RepeatedType(value: dotty.tools.dotc.semanticdb.RepeatedType) extends dotty.tools.dotc.semanticdb.TypeMessage.SealedValue derives CanEqual { + final case class RepeatedType(value: dotty.tools.dotc.semanticdb.RepeatedType) extends dotty.tools.dotc.semanticdb.TypeMessage.SealedValue derives CanEqual: type ValueType = 
dotty.tools.dotc.semanticdb.RepeatedType override def isRepeatedType: _root_.scala.Boolean = true override def repeatedType: _root_.scala.Option[dotty.tools.dotc.semanticdb.RepeatedType] = Some(value) override def number: _root_.scala.Int = 14 - } @SerialVersionUID(0L) - final case class MatchType(value: dotty.tools.dotc.semanticdb.MatchType) extends dotty.tools.dotc.semanticdb.TypeMessage.SealedValue derives CanEqual { + final case class MatchType(value: dotty.tools.dotc.semanticdb.MatchType) extends dotty.tools.dotc.semanticdb.TypeMessage.SealedValue derives CanEqual: type ValueType = dotty.tools.dotc.semanticdb.MatchType override def isMatchType: _root_.scala.Boolean = true override def matchType: _root_.scala.Option[dotty.tools.dotc.semanticdb.MatchType] = Some(value) override def number: _root_.scala.Int = 25 - } @SerialVersionUID(0L) - final case class LambdaType(value: dotty.tools.dotc.semanticdb.LambdaType) extends dotty.tools.dotc.semanticdb.TypeMessage.SealedValue derives CanEqual { + final case class LambdaType(value: dotty.tools.dotc.semanticdb.LambdaType) extends dotty.tools.dotc.semanticdb.TypeMessage.SealedValue derives CanEqual: type ValueType = dotty.tools.dotc.semanticdb.LambdaType override def isLambdaType: _root_.scala.Boolean = true override def lambdaType: _root_.scala.Option[dotty.tools.dotc.semanticdb.LambdaType] = Some(value) override def number: _root_.scala.Int = 26 - } - } final val TYPE_REF_FIELD_NUMBER = 2 final val SINGLE_TYPE_FIELD_NUMBER = 20 final val THIS_TYPE_FIELD_NUMBER = 21 @@ -524,61 +493,53 @@ object TypeMessage extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc sealedValue ) // @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.Type]) -} @SerialVersionUID(0L) final case class TypeRef( prefix: dotty.tools.dotc.semanticdb.Type = dotty.tools.dotc.semanticdb.TypeRef._typemapper_prefix.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance), symbol: _root_.scala.Predef.String = "", 
typeArguments: _root_.scala.Seq[dotty.tools.dotc.semanticdb.Type] = _root_.scala.Seq.empty - ) extends dotty.tools.dotc.semanticdb.Type.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { + ) extends dotty.tools.dotc.semanticdb.Type.NonEmpty with SemanticdbGeneratedMessage derives CanEqual: @transient @sharable private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private[this] def __computeSerializedSize(): _root_.scala.Int = var __size = 0 { val __value = dotty.tools.dotc.semanticdb.TypeRef._typemapper_prefix.toBase(prefix) - if (__value.serializedSize != 0) { + if (__value.serializedSize != 0) __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize - } }; { val __value = symbol - if (!__value.isEmpty) { + if (!__value.isEmpty) __size += SemanticdbOutputStream.computeStringSize(2, __value) - } }; typeArguments.foreach { __item => val __value = dotty.tools.dotc.semanticdb.TypeRef._typemapper_typeArguments.toBase(__item) __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize } __size - } - override def serializedSize: _root_.scala.Int = { + override def serializedSize: _root_.scala.Int = var __size = __serializedSizeMemoized - if (__size == 0) { + if (__size == 0) __size = __computeSerializedSize() + 1 __serializedSizeMemoized = __size - } __size - 1 - } - def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { + def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { val __v = dotty.tools.dotc.semanticdb.TypeRef._typemapper_prefix.toBase(prefix) - if (__v.serializedSize != 0) { + if (__v.serializedSize != 0) _output__.writeTag(1, 2) _output__.writeUInt32NoTag(__v.serializedSize) __v.writeTo(_output__) - } }; { val __v = symbol - if (!__v.isEmpty) { + if (!__v.isEmpty) _output__.writeString(2, __v) - } }; typeArguments.foreach { __v => val 
__m = dotty.tools.dotc.semanticdb.TypeRef._typemapper_typeArguments.toBase(__v) @@ -586,7 +547,6 @@ final case class TypeRef( _output__.writeUInt32NoTag(__m.serializedSize) __m.writeTo(_output__) }; - } def withPrefix(__v: dotty.tools.dotc.semanticdb.Type): TypeRef = copy(prefix = __v) def withSymbol(__v: _root_.scala.Predef.String): TypeRef = copy(symbol = __v) def clearTypeArguments = copy(typeArguments = _root_.scala.Seq.empty) @@ -598,18 +558,17 @@ final case class TypeRef( // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.TypeRef]) -} -object TypeRef extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.TypeRef] { +object TypeRef extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.TypeRef]: implicit def messageCompanion: SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.TypeRef] = this - def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.TypeRef = { + def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.TypeRef = var __prefix: _root_.scala.Option[dotty.tools.dotc.semanticdb.TypeMessage] = _root_.scala.None var __symbol: _root_.scala.Predef.String = "" val __typeArguments: _root_.scala.collection.immutable.VectorBuilder[dotty.tools.dotc.semanticdb.Type] = new _root_.scala.collection.immutable.VectorBuilder[dotty.tools.dotc.semanticdb.Type] var _done__ = false - while (!_done__) { + while (!_done__) val _tag__ = _input__.readTag() - _tag__ match { + _tag__ match case 0 => _done__ = true case 10 => __prefix = _root_.scala.Some(__prefix.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.TypeMessage](_input__))(LiteParser.readMessage(_input__, _))) @@ -618,14 +577,11 @@ object TypeRef extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.sem case 26 => __typeArguments += dotty.tools.dotc.semanticdb.TypeRef._typemapper_typeArguments.toCustom(LiteParser.readMessage[dotty.tools.dotc.semanticdb.TypeMessage](_input__)) case tag => 
_input__.skipField(tag) - } - } dotty.tools.dotc.semanticdb.TypeRef( prefix = dotty.tools.dotc.semanticdb.TypeRef._typemapper_prefix.toCustom(__prefix.getOrElse(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance)), symbol = __symbol, typeArguments = __typeArguments.result() ) - } @@ -654,58 +610,49 @@ object TypeRef extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.sem typeArguments ) // @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.TypeRef]) -} @SerialVersionUID(0L) final case class SingleType( prefix: dotty.tools.dotc.semanticdb.Type = dotty.tools.dotc.semanticdb.SingleType._typemapper_prefix.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance), symbol: _root_.scala.Predef.String = "" - ) extends dotty.tools.dotc.semanticdb.Type.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { + ) extends dotty.tools.dotc.semanticdb.Type.NonEmpty with SemanticdbGeneratedMessage derives CanEqual: @transient @sharable private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private[this] def __computeSerializedSize(): _root_.scala.Int = var __size = 0 { val __value = dotty.tools.dotc.semanticdb.SingleType._typemapper_prefix.toBase(prefix) - if (__value.serializedSize != 0) { + if (__value.serializedSize != 0) __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize - } }; { val __value = symbol - if (!__value.isEmpty) { + if (!__value.isEmpty) __size += SemanticdbOutputStream.computeStringSize(2, __value) - } }; __size - } - override def serializedSize: _root_.scala.Int = { + override def serializedSize: _root_.scala.Int = var __size = __serializedSizeMemoized - if (__size == 0) { + if (__size == 0) __size = __computeSerializedSize() + 1 __serializedSizeMemoized = __size - } __size - 1 - } - def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { + def 
writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { val __v = dotty.tools.dotc.semanticdb.SingleType._typemapper_prefix.toBase(prefix) - if (__v.serializedSize != 0) { + if (__v.serializedSize != 0) _output__.writeTag(1, 2) _output__.writeUInt32NoTag(__v.serializedSize) __v.writeTo(_output__) - } }; { val __v = symbol - if (!__v.isEmpty) { + if (!__v.isEmpty) _output__.writeString(2, __v) - } }; - } def withPrefix(__v: dotty.tools.dotc.semanticdb.Type): SingleType = copy(prefix = __v) def withSymbol(__v: _root_.scala.Predef.String): SingleType = copy(symbol = __v) @@ -713,30 +660,26 @@ final case class SingleType( // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.SingleType]) -} -object SingleType extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.SingleType] { +object SingleType extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.SingleType]: implicit def messageCompanion: SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.SingleType] = this - def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.SingleType = { + def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.SingleType = var __prefix: _root_.scala.Option[dotty.tools.dotc.semanticdb.TypeMessage] = _root_.scala.None var __symbol: _root_.scala.Predef.String = "" var _done__ = false - while (!_done__) { + while (!_done__) val _tag__ = _input__.readTag() - _tag__ match { + _tag__ match case 0 => _done__ = true case 10 => __prefix = _root_.scala.Some(__prefix.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.TypeMessage](_input__))(LiteParser.readMessage(_input__, _))) case 18 => __symbol = _input__.readStringRequireUtf8() case tag => _input__.skipField(tag) - } - } dotty.tools.dotc.semanticdb.SingleType( prefix = dotty.tools.dotc.semanticdb.SingleType._typemapper_prefix.toCustom(__prefix.getOrElse(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance)), symbol = 
__symbol ) - } @@ -759,68 +702,57 @@ object SingleType extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc. symbol ) // @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.SingleType]) -} @SerialVersionUID(0L) final case class ThisType( symbol: _root_.scala.Predef.String = "" - ) extends dotty.tools.dotc.semanticdb.Type.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { + ) extends dotty.tools.dotc.semanticdb.Type.NonEmpty with SemanticdbGeneratedMessage derives CanEqual: @transient @sharable private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private[this] def __computeSerializedSize(): _root_.scala.Int = var __size = 0 { val __value = symbol - if (!__value.isEmpty) { + if (!__value.isEmpty) __size += SemanticdbOutputStream.computeStringSize(1, __value) - } }; __size - } - override def serializedSize: _root_.scala.Int = { + override def serializedSize: _root_.scala.Int = var __size = __serializedSizeMemoized - if (__size == 0) { + if (__size == 0) __size = __computeSerializedSize() + 1 __serializedSizeMemoized = __size - } __size - 1 - } - def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { + def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { val __v = symbol - if (!__v.isEmpty) { + if (!__v.isEmpty) _output__.writeString(1, __v) - } }; - } def withSymbol(__v: _root_.scala.Predef.String): ThisType = copy(symbol = __v) // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.ThisType]) -} -object ThisType extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.ThisType] { +object ThisType extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.ThisType]: implicit def messageCompanion: SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.ThisType] = this - def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.ThisType 
= { + def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.ThisType = var __symbol: _root_.scala.Predef.String = "" var _done__ = false - while (!_done__) { + while (!_done__) val _tag__ = _input__.readTag() - _tag__ match { + _tag__ match case 0 => _done__ = true case 10 => __symbol = _input__.readStringRequireUtf8() case tag => _input__.skipField(tag) - } - } dotty.tools.dotc.semanticdb.ThisType( symbol = __symbol ) - } @@ -837,58 +769,49 @@ object ThisType extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.se symbol ) // @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.ThisType]) -} @SerialVersionUID(0L) final case class SuperType( prefix: dotty.tools.dotc.semanticdb.Type = dotty.tools.dotc.semanticdb.SuperType._typemapper_prefix.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance), symbol: _root_.scala.Predef.String = "" - ) extends dotty.tools.dotc.semanticdb.Type.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { + ) extends dotty.tools.dotc.semanticdb.Type.NonEmpty with SemanticdbGeneratedMessage derives CanEqual: @transient @sharable private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private[this] def __computeSerializedSize(): _root_.scala.Int = var __size = 0 { val __value = dotty.tools.dotc.semanticdb.SuperType._typemapper_prefix.toBase(prefix) - if (__value.serializedSize != 0) { + if (__value.serializedSize != 0) __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize - } }; { val __value = symbol - if (!__value.isEmpty) { + if (!__value.isEmpty) __size += SemanticdbOutputStream.computeStringSize(2, __value) - } }; __size - } - override def serializedSize: _root_.scala.Int = { + override def serializedSize: _root_.scala.Int = var __size = __serializedSizeMemoized - if (__size == 0) { + if (__size == 0) __size = 
__computeSerializedSize() + 1 __serializedSizeMemoized = __size - } __size - 1 - } - def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { + def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { val __v = dotty.tools.dotc.semanticdb.SuperType._typemapper_prefix.toBase(prefix) - if (__v.serializedSize != 0) { + if (__v.serializedSize != 0) _output__.writeTag(1, 2) _output__.writeUInt32NoTag(__v.serializedSize) __v.writeTo(_output__) - } }; { val __v = symbol - if (!__v.isEmpty) { + if (!__v.isEmpty) _output__.writeString(2, __v) - } }; - } def withPrefix(__v: dotty.tools.dotc.semanticdb.Type): SuperType = copy(prefix = __v) def withSymbol(__v: _root_.scala.Predef.String): SuperType = copy(symbol = __v) @@ -896,30 +819,26 @@ final case class SuperType( // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.SuperType]) -} -object SuperType extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.SuperType] { +object SuperType extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.SuperType]: implicit def messageCompanion: SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.SuperType] = this - def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.SuperType = { + def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.SuperType = var __prefix: _root_.scala.Option[dotty.tools.dotc.semanticdb.TypeMessage] = _root_.scala.None var __symbol: _root_.scala.Predef.String = "" var _done__ = false - while (!_done__) { + while (!_done__) val _tag__ = _input__.readTag() - _tag__ match { + _tag__ match case 0 => _done__ = true case 10 => __prefix = _root_.scala.Some(__prefix.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.TypeMessage](_input__))(LiteParser.readMessage(_input__, _))) case 18 => __symbol = _input__.readStringRequireUtf8() case tag => _input__.skipField(tag) - } - } dotty.tools.dotc.semanticdb.SuperType( prefix = 
dotty.tools.dotc.semanticdb.SuperType._typemapper_prefix.toCustom(__prefix.getOrElse(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance)), symbol = __symbol ) - } @@ -942,70 +861,59 @@ object SuperType extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.s symbol ) // @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.SuperType]) -} @SerialVersionUID(0L) final case class ConstantType( constant: dotty.tools.dotc.semanticdb.Constant = dotty.tools.dotc.semanticdb.ConstantType._typemapper_constant.toCustom(dotty.tools.dotc.semanticdb.ConstantMessage.defaultInstance) - ) extends dotty.tools.dotc.semanticdb.Type.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { + ) extends dotty.tools.dotc.semanticdb.Type.NonEmpty with SemanticdbGeneratedMessage derives CanEqual: @transient @sharable private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private[this] def __computeSerializedSize(): _root_.scala.Int = var __size = 0 { val __value = dotty.tools.dotc.semanticdb.ConstantType._typemapper_constant.toBase(constant) - if (__value.serializedSize != 0) { + if (__value.serializedSize != 0) __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize - } }; __size - } - override def serializedSize: _root_.scala.Int = { + override def serializedSize: _root_.scala.Int = var __size = __serializedSizeMemoized - if (__size == 0) { + if (__size == 0) __size = __computeSerializedSize() + 1 __serializedSizeMemoized = __size - } __size - 1 - } - def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { + def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { val __v = dotty.tools.dotc.semanticdb.ConstantType._typemapper_constant.toBase(constant) - if (__v.serializedSize != 0) { + if (__v.serializedSize != 0) _output__.writeTag(1, 2) _output__.writeUInt32NoTag(__v.serializedSize) 
__v.writeTo(_output__) - } }; - } def withConstant(__v: dotty.tools.dotc.semanticdb.Constant): ConstantType = copy(constant = __v) // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.ConstantType]) -} -object ConstantType extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.ConstantType] { +object ConstantType extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.ConstantType]: implicit def messageCompanion: SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.ConstantType] = this - def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.ConstantType = { + def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.ConstantType = var __constant: _root_.scala.Option[dotty.tools.dotc.semanticdb.ConstantMessage] = _root_.scala.None var _done__ = false - while (!_done__) { + while (!_done__) val _tag__ = _input__.readTag() - _tag__ match { + _tag__ match case 0 => _done__ = true case 10 => __constant = _root_.scala.Some(__constant.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.ConstantMessage](_input__))(LiteParser.readMessage(_input__, _))) case tag => _input__.skipField(tag) - } - } dotty.tools.dotc.semanticdb.ConstantType( constant = dotty.tools.dotc.semanticdb.ConstantType._typemapper_constant.toCustom(__constant.getOrElse(dotty.tools.dotc.semanticdb.ConstantMessage.defaultInstance)) ) - } @@ -1024,39 +932,34 @@ object ConstantType extends SemanticdbGeneratedMessageCompanion[dotty.tools.dot constant ) // @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.ConstantType]) -} @SerialVersionUID(0L) final case class IntersectionType( types: _root_.scala.Seq[dotty.tools.dotc.semanticdb.Type] = _root_.scala.Seq.empty - ) extends dotty.tools.dotc.semanticdb.Type.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { + ) extends dotty.tools.dotc.semanticdb.Type.NonEmpty with SemanticdbGeneratedMessage derives CanEqual: 
@transient @sharable private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private[this] def __computeSerializedSize(): _root_.scala.Int = var __size = 0 types.foreach { __item => val __value = dotty.tools.dotc.semanticdb.IntersectionType._typemapper_types.toBase(__item) __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize } __size - } - override def serializedSize: _root_.scala.Int = { + override def serializedSize: _root_.scala.Int = var __size = __serializedSizeMemoized - if (__size == 0) { + if (__size == 0) __size = __computeSerializedSize() + 1 __serializedSizeMemoized = __size - } __size - 1 - } - def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { + def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = types.foreach { __v => val __m = dotty.tools.dotc.semanticdb.IntersectionType._typemapper_types.toBase(__v) _output__.writeTag(1, 2) _output__.writeUInt32NoTag(__m.serializedSize) __m.writeTo(_output__) }; - } def clearTypes = copy(types = _root_.scala.Seq.empty) def addTypes(__vs: dotty.tools.dotc.semanticdb.Type *): IntersectionType = addAllTypes(__vs) def addAllTypes(__vs: Iterable[dotty.tools.dotc.semanticdb.Type]): IntersectionType = copy(types = types ++ __vs) @@ -1066,26 +969,22 @@ final case class IntersectionType( // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.IntersectionType]) -} -object IntersectionType extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.IntersectionType] { +object IntersectionType extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.IntersectionType]: implicit def messageCompanion: SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.IntersectionType] = this - def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.IntersectionType = { + def parseFrom(`_input__`: 
SemanticdbInputStream): dotty.tools.dotc.semanticdb.IntersectionType = val __types: _root_.scala.collection.immutable.VectorBuilder[dotty.tools.dotc.semanticdb.Type] = new _root_.scala.collection.immutable.VectorBuilder[dotty.tools.dotc.semanticdb.Type] var _done__ = false - while (!_done__) { + while (!_done__) val _tag__ = _input__.readTag() - _tag__ match { + _tag__ match case 0 => _done__ = true case 10 => __types += dotty.tools.dotc.semanticdb.IntersectionType._typemapper_types.toCustom(LiteParser.readMessage[dotty.tools.dotc.semanticdb.TypeMessage](_input__)) case tag => _input__.skipField(tag) - } - } dotty.tools.dotc.semanticdb.IntersectionType( types = __types.result() ) - } @@ -1104,39 +1003,34 @@ object IntersectionType extends SemanticdbGeneratedMessageCompanion[dotty.tools types ) // @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.IntersectionType]) -} @SerialVersionUID(0L) final case class UnionType( types: _root_.scala.Seq[dotty.tools.dotc.semanticdb.Type] = _root_.scala.Seq.empty - ) extends dotty.tools.dotc.semanticdb.Type.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { + ) extends dotty.tools.dotc.semanticdb.Type.NonEmpty with SemanticdbGeneratedMessage derives CanEqual: @transient @sharable private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private[this] def __computeSerializedSize(): _root_.scala.Int = var __size = 0 types.foreach { __item => val __value = dotty.tools.dotc.semanticdb.UnionType._typemapper_types.toBase(__item) __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize } __size - } - override def serializedSize: _root_.scala.Int = { + override def serializedSize: _root_.scala.Int = var __size = __serializedSizeMemoized - if (__size == 0) { + if (__size == 0) __size = __computeSerializedSize() + 1 __serializedSizeMemoized = __size - } __size - 1 - } - def 
writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { + def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = types.foreach { __v => val __m = dotty.tools.dotc.semanticdb.UnionType._typemapper_types.toBase(__v) _output__.writeTag(1, 2) _output__.writeUInt32NoTag(__m.serializedSize) __m.writeTo(_output__) }; - } def clearTypes = copy(types = _root_.scala.Seq.empty) def addTypes(__vs: dotty.tools.dotc.semanticdb.Type *): UnionType = addAllTypes(__vs) def addAllTypes(__vs: Iterable[dotty.tools.dotc.semanticdb.Type]): UnionType = copy(types = types ++ __vs) @@ -1146,26 +1040,22 @@ final case class UnionType( // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.UnionType]) -} -object UnionType extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.UnionType] { +object UnionType extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.UnionType]: implicit def messageCompanion: SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.UnionType] = this - def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.UnionType = { + def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.UnionType = val __types: _root_.scala.collection.immutable.VectorBuilder[dotty.tools.dotc.semanticdb.Type] = new _root_.scala.collection.immutable.VectorBuilder[dotty.tools.dotc.semanticdb.Type] var _done__ = false - while (!_done__) { + while (!_done__) val _tag__ = _input__.readTag() - _tag__ match { + _tag__ match case 0 => _done__ = true case 10 => __types += dotty.tools.dotc.semanticdb.UnionType._typemapper_types.toCustom(LiteParser.readMessage[dotty.tools.dotc.semanticdb.TypeMessage](_input__)) case tag => _input__.skipField(tag) - } - } dotty.tools.dotc.semanticdb.UnionType( types = __types.result() ) - } @@ -1184,39 +1074,34 @@ object UnionType extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.s types ) // 
@@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.UnionType]) -} @SerialVersionUID(0L) final case class WithType( types: _root_.scala.Seq[dotty.tools.dotc.semanticdb.Type] = _root_.scala.Seq.empty - ) extends dotty.tools.dotc.semanticdb.Type.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { + ) extends dotty.tools.dotc.semanticdb.Type.NonEmpty with SemanticdbGeneratedMessage derives CanEqual: @transient @sharable private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private[this] def __computeSerializedSize(): _root_.scala.Int = var __size = 0 types.foreach { __item => val __value = dotty.tools.dotc.semanticdb.WithType._typemapper_types.toBase(__item) __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize } __size - } - override def serializedSize: _root_.scala.Int = { + override def serializedSize: _root_.scala.Int = var __size = __serializedSizeMemoized - if (__size == 0) { + if (__size == 0) __size = __computeSerializedSize() + 1 __serializedSizeMemoized = __size - } __size - 1 - } - def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { + def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = types.foreach { __v => val __m = dotty.tools.dotc.semanticdb.WithType._typemapper_types.toBase(__v) _output__.writeTag(1, 2) _output__.writeUInt32NoTag(__m.serializedSize) __m.writeTo(_output__) }; - } def clearTypes = copy(types = _root_.scala.Seq.empty) def addTypes(__vs: dotty.tools.dotc.semanticdb.Type *): WithType = addAllTypes(__vs) def addAllTypes(__vs: Iterable[dotty.tools.dotc.semanticdb.Type]): WithType = copy(types = types ++ __vs) @@ -1226,26 +1111,22 @@ final case class WithType( // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.WithType]) -} -object WithType extends 
SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.WithType] { +object WithType extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.WithType]: implicit def messageCompanion: SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.WithType] = this - def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.WithType = { + def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.WithType = val __types: _root_.scala.collection.immutable.VectorBuilder[dotty.tools.dotc.semanticdb.Type] = new _root_.scala.collection.immutable.VectorBuilder[dotty.tools.dotc.semanticdb.Type] var _done__ = false - while (!_done__) { + while (!_done__) val _tag__ = _input__.readTag() - _tag__ match { + _tag__ match case 0 => _done__ = true case 10 => __types += dotty.tools.dotc.semanticdb.WithType._typemapper_types.toCustom(LiteParser.readMessage[dotty.tools.dotc.semanticdb.TypeMessage](_input__)) case tag => _input__.skipField(tag) - } - } dotty.tools.dotc.semanticdb.WithType( types = __types.result() ) - } @@ -1264,47 +1145,41 @@ object WithType extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.se types ) // @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.WithType]) -} @SerialVersionUID(0L) final case class StructuralType( tpe: dotty.tools.dotc.semanticdb.Type = dotty.tools.dotc.semanticdb.StructuralType._typemapper_tpe.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance), declarations: _root_.scala.Option[dotty.tools.dotc.semanticdb.Scope] = _root_.scala.None - ) extends dotty.tools.dotc.semanticdb.Type.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { + ) extends dotty.tools.dotc.semanticdb.Type.NonEmpty with SemanticdbGeneratedMessage derives CanEqual: @transient @sharable private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private[this] def 
__computeSerializedSize(): _root_.scala.Int = var __size = 0 { val __value = dotty.tools.dotc.semanticdb.StructuralType._typemapper_tpe.toBase(tpe) - if (__value.serializedSize != 0) { + if (__value.serializedSize != 0) __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize - } }; if (declarations.isDefined) { val __value = declarations.get __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize }; __size - } - override def serializedSize: _root_.scala.Int = { + override def serializedSize: _root_.scala.Int = var __size = __serializedSizeMemoized - if (__size == 0) { + if (__size == 0) __size = __computeSerializedSize() + 1 __serializedSizeMemoized = __size - } __size - 1 - } - def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { + def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { val __v = dotty.tools.dotc.semanticdb.StructuralType._typemapper_tpe.toBase(tpe) - if (__v.serializedSize != 0) { + if (__v.serializedSize != 0) _output__.writeTag(4, 2) _output__.writeUInt32NoTag(__v.serializedSize) __v.writeTo(_output__) - } }; declarations.foreach { __v => val __m = __v @@ -1312,7 +1187,6 @@ final case class StructuralType( _output__.writeUInt32NoTag(__m.serializedSize) __m.writeTo(_output__) }; - } def withTpe(__v: dotty.tools.dotc.semanticdb.Type): StructuralType = copy(tpe = __v) def getDeclarations: dotty.tools.dotc.semanticdb.Scope = declarations.getOrElse(dotty.tools.dotc.semanticdb.Scope.defaultInstance) def clearDeclarations: StructuralType = copy(declarations = _root_.scala.None) @@ -1322,30 +1196,26 @@ final case class StructuralType( // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.StructuralType]) -} -object StructuralType extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.StructuralType] { +object StructuralType extends 
SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.StructuralType]: implicit def messageCompanion: SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.StructuralType] = this - def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.StructuralType = { + def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.StructuralType = var __tpe: _root_.scala.Option[dotty.tools.dotc.semanticdb.TypeMessage] = _root_.scala.None var __declarations: _root_.scala.Option[dotty.tools.dotc.semanticdb.Scope] = _root_.scala.None var _done__ = false - while (!_done__) { + while (!_done__) val _tag__ = _input__.readTag() - _tag__ match { + _tag__ match case 0 => _done__ = true case 34 => __tpe = _root_.scala.Some(__tpe.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.TypeMessage](_input__))(LiteParser.readMessage(_input__, _))) case 42 => __declarations = Option(__declarations.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.Scope](_input__))(LiteParser.readMessage(_input__, _))) case tag => _input__.skipField(tag) - } - } dotty.tools.dotc.semanticdb.StructuralType( tpe = dotty.tools.dotc.semanticdb.StructuralType._typemapper_tpe.toCustom(__tpe.getOrElse(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance)), declarations = __declarations ) - } @@ -1368,16 +1238,15 @@ object StructuralType extends SemanticdbGeneratedMessageCompanion[dotty.tools.d declarations ) // @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.StructuralType]) -} @SerialVersionUID(0L) final case class AnnotatedType( annotations: _root_.scala.Seq[dotty.tools.dotc.semanticdb.Annotation] = _root_.scala.Seq.empty, tpe: dotty.tools.dotc.semanticdb.Type = dotty.tools.dotc.semanticdb.AnnotatedType._typemapper_tpe.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance) - ) extends dotty.tools.dotc.semanticdb.Type.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { + ) extends 
dotty.tools.dotc.semanticdb.Type.NonEmpty with SemanticdbGeneratedMessage derives CanEqual: @transient @sharable private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private[this] def __computeSerializedSize(): _root_.scala.Int = var __size = 0 annotations.foreach { __item => val __value = __item @@ -1386,29 +1255,24 @@ final case class AnnotatedType( { val __value = dotty.tools.dotc.semanticdb.AnnotatedType._typemapper_tpe.toBase(tpe) - if (__value.serializedSize != 0) { + if (__value.serializedSize != 0) __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize - } }; __size - } - override def serializedSize: _root_.scala.Int = { + override def serializedSize: _root_.scala.Int = var __size = __serializedSizeMemoized - if (__size == 0) { + if (__size == 0) __size = __computeSerializedSize() + 1 __serializedSizeMemoized = __size - } __size - 1 - } - def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { + def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { val __v = dotty.tools.dotc.semanticdb.AnnotatedType._typemapper_tpe.toBase(tpe) - if (__v.serializedSize != 0) { + if (__v.serializedSize != 0) _output__.writeTag(1, 2) _output__.writeUInt32NoTag(__v.serializedSize) __v.writeTo(_output__) - } }; annotations.foreach { __v => val __m = __v @@ -1416,7 +1280,6 @@ final case class AnnotatedType( _output__.writeUInt32NoTag(__m.serializedSize) __m.writeTo(_output__) }; - } def clearAnnotations = copy(annotations = _root_.scala.Seq.empty) def addAnnotations(__vs: dotty.tools.dotc.semanticdb.Annotation *): AnnotatedType = addAllAnnotations(__vs) def addAllAnnotations(__vs: Iterable[dotty.tools.dotc.semanticdb.Annotation]): AnnotatedType = copy(annotations = annotations ++ __vs) @@ -1427,30 +1290,26 @@ final case class AnnotatedType( // 
@@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.AnnotatedType]) -} -object AnnotatedType extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.AnnotatedType] { +object AnnotatedType extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.AnnotatedType]: implicit def messageCompanion: SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.AnnotatedType] = this - def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.AnnotatedType = { + def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.AnnotatedType = val __annotations: _root_.scala.collection.immutable.VectorBuilder[dotty.tools.dotc.semanticdb.Annotation] = new _root_.scala.collection.immutable.VectorBuilder[dotty.tools.dotc.semanticdb.Annotation] var __tpe: _root_.scala.Option[dotty.tools.dotc.semanticdb.TypeMessage] = _root_.scala.None var _done__ = false - while (!_done__) { + while (!_done__) val _tag__ = _input__.readTag() - _tag__ match { + _tag__ match case 0 => _done__ = true case 26 => __annotations += LiteParser.readMessage[dotty.tools.dotc.semanticdb.Annotation](_input__) case 10 => __tpe = _root_.scala.Some(__tpe.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.TypeMessage](_input__))(LiteParser.readMessage(_input__, _))) case tag => _input__.skipField(tag) - } - } dotty.tools.dotc.semanticdb.AnnotatedType( annotations = __annotations.result(), tpe = dotty.tools.dotc.semanticdb.AnnotatedType._typemapper_tpe.toCustom(__tpe.getOrElse(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance)) ) - } @@ -1473,47 +1332,41 @@ object AnnotatedType extends SemanticdbGeneratedMessageCompanion[dotty.tools.do tpe ) // @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.AnnotatedType]) -} @SerialVersionUID(0L) final case class ExistentialType( tpe: dotty.tools.dotc.semanticdb.Type = 
dotty.tools.dotc.semanticdb.ExistentialType._typemapper_tpe.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance), declarations: _root_.scala.Option[dotty.tools.dotc.semanticdb.Scope] = _root_.scala.None - ) extends dotty.tools.dotc.semanticdb.Type.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { + ) extends dotty.tools.dotc.semanticdb.Type.NonEmpty with SemanticdbGeneratedMessage derives CanEqual: @transient @sharable private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private[this] def __computeSerializedSize(): _root_.scala.Int = var __size = 0 { val __value = dotty.tools.dotc.semanticdb.ExistentialType._typemapper_tpe.toBase(tpe) - if (__value.serializedSize != 0) { + if (__value.serializedSize != 0) __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize - } }; if (declarations.isDefined) { val __value = declarations.get __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize }; __size - } - override def serializedSize: _root_.scala.Int = { + override def serializedSize: _root_.scala.Int = var __size = __serializedSizeMemoized - if (__size == 0) { + if (__size == 0) __size = __computeSerializedSize() + 1 __serializedSizeMemoized = __size - } __size - 1 - } - def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { + def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { val __v = dotty.tools.dotc.semanticdb.ExistentialType._typemapper_tpe.toBase(tpe) - if (__v.serializedSize != 0) { + if (__v.serializedSize != 0) _output__.writeTag(1, 2) _output__.writeUInt32NoTag(__v.serializedSize) __v.writeTo(_output__) - } }; declarations.foreach { __v => val __m = __v @@ -1521,7 +1374,6 @@ final case class ExistentialType( _output__.writeUInt32NoTag(__m.serializedSize) __m.writeTo(_output__) }; - } def withTpe(__v: 
dotty.tools.dotc.semanticdb.Type): ExistentialType = copy(tpe = __v) def getDeclarations: dotty.tools.dotc.semanticdb.Scope = declarations.getOrElse(dotty.tools.dotc.semanticdb.Scope.defaultInstance) def clearDeclarations: ExistentialType = copy(declarations = _root_.scala.None) @@ -1531,30 +1383,26 @@ final case class ExistentialType( // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.ExistentialType]) -} -object ExistentialType extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.ExistentialType] { +object ExistentialType extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.ExistentialType]: implicit def messageCompanion: SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.ExistentialType] = this - def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.ExistentialType = { + def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.ExistentialType = var __tpe: _root_.scala.Option[dotty.tools.dotc.semanticdb.TypeMessage] = _root_.scala.None var __declarations: _root_.scala.Option[dotty.tools.dotc.semanticdb.Scope] = _root_.scala.None var _done__ = false - while (!_done__) { + while (!_done__) val _tag__ = _input__.readTag() - _tag__ match { + _tag__ match case 0 => _done__ = true case 10 => __tpe = _root_.scala.Some(__tpe.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.TypeMessage](_input__))(LiteParser.readMessage(_input__, _))) case 26 => __declarations = Option(__declarations.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.Scope](_input__))(LiteParser.readMessage(_input__, _))) case tag => _input__.skipField(tag) - } - } dotty.tools.dotc.semanticdb.ExistentialType( tpe = dotty.tools.dotc.semanticdb.ExistentialType._typemapper_tpe.toCustom(__tpe.getOrElse(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance)), declarations = __declarations ) - } @@ -1577,16 +1425,15 @@ object ExistentialType extends 
SemanticdbGeneratedMessageCompanion[dotty.tools. declarations ) // @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.ExistentialType]) -} @SerialVersionUID(0L) final case class UniversalType( typeParameters: _root_.scala.Option[dotty.tools.dotc.semanticdb.Scope] = _root_.scala.None, tpe: dotty.tools.dotc.semanticdb.Type = dotty.tools.dotc.semanticdb.UniversalType._typemapper_tpe.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance) - ) extends dotty.tools.dotc.semanticdb.Type.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { + ) extends dotty.tools.dotc.semanticdb.Type.NonEmpty with SemanticdbGeneratedMessage derives CanEqual: @transient @sharable private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private[this] def __computeSerializedSize(): _root_.scala.Int = var __size = 0 if (typeParameters.isDefined) { val __value = typeParameters.get @@ -1595,29 +1442,24 @@ final case class UniversalType( { val __value = dotty.tools.dotc.semanticdb.UniversalType._typemapper_tpe.toBase(tpe) - if (__value.serializedSize != 0) { + if (__value.serializedSize != 0) __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize - } }; __size - } - override def serializedSize: _root_.scala.Int = { + override def serializedSize: _root_.scala.Int = var __size = __serializedSizeMemoized - if (__size == 0) { + if (__size == 0) __size = __computeSerializedSize() + 1 __serializedSizeMemoized = __size - } __size - 1 - } - def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { + def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { val __v = dotty.tools.dotc.semanticdb.UniversalType._typemapper_tpe.toBase(tpe) - if (__v.serializedSize != 0) { + if (__v.serializedSize != 0) _output__.writeTag(2, 2) _output__.writeUInt32NoTag(__v.serializedSize) __v.writeTo(_output__) - } }; 
typeParameters.foreach { __v => val __m = __v @@ -1625,7 +1467,6 @@ final case class UniversalType( _output__.writeUInt32NoTag(__m.serializedSize) __m.writeTo(_output__) }; - } def getTypeParameters: dotty.tools.dotc.semanticdb.Scope = typeParameters.getOrElse(dotty.tools.dotc.semanticdb.Scope.defaultInstance) def clearTypeParameters: UniversalType = copy(typeParameters = _root_.scala.None) def withTypeParameters(__v: dotty.tools.dotc.semanticdb.Scope): UniversalType = copy(typeParameters = Option(__v)) @@ -1635,30 +1476,26 @@ final case class UniversalType( // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.UniversalType]) -} -object UniversalType extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.UniversalType] { +object UniversalType extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.UniversalType]: implicit def messageCompanion: SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.UniversalType] = this - def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.UniversalType = { + def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.UniversalType = var __typeParameters: _root_.scala.Option[dotty.tools.dotc.semanticdb.Scope] = _root_.scala.None var __tpe: _root_.scala.Option[dotty.tools.dotc.semanticdb.TypeMessage] = _root_.scala.None var _done__ = false - while (!_done__) { + while (!_done__) val _tag__ = _input__.readTag() - _tag__ match { + _tag__ match case 0 => _done__ = true case 26 => __typeParameters = Option(__typeParameters.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.Scope](_input__))(LiteParser.readMessage(_input__, _))) case 18 => __tpe = _root_.scala.Some(__tpe.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.TypeMessage](_input__))(LiteParser.readMessage(_input__, _))) case tag => _input__.skipField(tag) - } - } dotty.tools.dotc.semanticdb.UniversalType( typeParameters = __typeParameters, tpe = 
dotty.tools.dotc.semanticdb.UniversalType._typemapper_tpe.toCustom(__tpe.getOrElse(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance)) ) - } @@ -1681,70 +1518,59 @@ object UniversalType extends SemanticdbGeneratedMessageCompanion[dotty.tools.do tpe ) // @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.UniversalType]) -} @SerialVersionUID(0L) final case class ByNameType( tpe: dotty.tools.dotc.semanticdb.Type = dotty.tools.dotc.semanticdb.ByNameType._typemapper_tpe.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance) - ) extends dotty.tools.dotc.semanticdb.Type.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { + ) extends dotty.tools.dotc.semanticdb.Type.NonEmpty with SemanticdbGeneratedMessage derives CanEqual: @transient @sharable private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private[this] def __computeSerializedSize(): _root_.scala.Int = var __size = 0 { val __value = dotty.tools.dotc.semanticdb.ByNameType._typemapper_tpe.toBase(tpe) - if (__value.serializedSize != 0) { + if (__value.serializedSize != 0) __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize - } }; __size - } - override def serializedSize: _root_.scala.Int = { + override def serializedSize: _root_.scala.Int = var __size = __serializedSizeMemoized - if (__size == 0) { + if (__size == 0) __size = __computeSerializedSize() + 1 __serializedSizeMemoized = __size - } __size - 1 - } - def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { + def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { val __v = dotty.tools.dotc.semanticdb.ByNameType._typemapper_tpe.toBase(tpe) - if (__v.serializedSize != 0) { + if (__v.serializedSize != 0) _output__.writeTag(1, 2) _output__.writeUInt32NoTag(__v.serializedSize) __v.writeTo(_output__) - } }; - } def withTpe(__v: 
dotty.tools.dotc.semanticdb.Type): ByNameType = copy(tpe = __v) // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.ByNameType]) -} -object ByNameType extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.ByNameType] { +object ByNameType extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.ByNameType]: implicit def messageCompanion: SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.ByNameType] = this - def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.ByNameType = { + def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.ByNameType = var __tpe: _root_.scala.Option[dotty.tools.dotc.semanticdb.TypeMessage] = _root_.scala.None var _done__ = false - while (!_done__) { + while (!_done__) val _tag__ = _input__.readTag() - _tag__ match { + _tag__ match case 0 => _done__ = true case 10 => __tpe = _root_.scala.Some(__tpe.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.TypeMessage](_input__))(LiteParser.readMessage(_input__, _))) case tag => _input__.skipField(tag) - } - } dotty.tools.dotc.semanticdb.ByNameType( tpe = dotty.tools.dotc.semanticdb.ByNameType._typemapper_tpe.toCustom(__tpe.getOrElse(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance)) ) - } @@ -1763,70 +1589,59 @@ object ByNameType extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc. 
tpe ) // @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.ByNameType]) -} @SerialVersionUID(0L) final case class RepeatedType( tpe: dotty.tools.dotc.semanticdb.Type = dotty.tools.dotc.semanticdb.RepeatedType._typemapper_tpe.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance) - ) extends dotty.tools.dotc.semanticdb.Type.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { + ) extends dotty.tools.dotc.semanticdb.Type.NonEmpty with SemanticdbGeneratedMessage derives CanEqual: @transient @sharable private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private[this] def __computeSerializedSize(): _root_.scala.Int = var __size = 0 { val __value = dotty.tools.dotc.semanticdb.RepeatedType._typemapper_tpe.toBase(tpe) - if (__value.serializedSize != 0) { + if (__value.serializedSize != 0) __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize - } }; __size - } - override def serializedSize: _root_.scala.Int = { + override def serializedSize: _root_.scala.Int = var __size = __serializedSizeMemoized - if (__size == 0) { + if (__size == 0) __size = __computeSerializedSize() + 1 __serializedSizeMemoized = __size - } __size - 1 - } - def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { + def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { val __v = dotty.tools.dotc.semanticdb.RepeatedType._typemapper_tpe.toBase(tpe) - if (__v.serializedSize != 0) { + if (__v.serializedSize != 0) _output__.writeTag(1, 2) _output__.writeUInt32NoTag(__v.serializedSize) __v.writeTo(_output__) - } }; - } def withTpe(__v: dotty.tools.dotc.semanticdb.Type): RepeatedType = copy(tpe = __v) // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.RepeatedType]) -} -object RepeatedType extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.RepeatedType] { 
+object RepeatedType extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.RepeatedType]: implicit def messageCompanion: SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.RepeatedType] = this - def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.RepeatedType = { + def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.RepeatedType = var __tpe: _root_.scala.Option[dotty.tools.dotc.semanticdb.TypeMessage] = _root_.scala.None var _done__ = false - while (!_done__) { + while (!_done__) val _tag__ = _input__.readTag() - _tag__ match { + _tag__ match case 0 => _done__ = true case 10 => __tpe = _root_.scala.Some(__tpe.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.TypeMessage](_input__))(LiteParser.readMessage(_input__, _))) case tag => _input__.skipField(tag) - } - } dotty.tools.dotc.semanticdb.RepeatedType( tpe = dotty.tools.dotc.semanticdb.RepeatedType._typemapper_tpe.toCustom(__tpe.getOrElse(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance)) ) - } @@ -1845,47 +1660,41 @@ object RepeatedType extends SemanticdbGeneratedMessageCompanion[dotty.tools.dot tpe ) // @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.RepeatedType]) -} @SerialVersionUID(0L) final case class MatchType( scrutinee: dotty.tools.dotc.semanticdb.Type = dotty.tools.dotc.semanticdb.MatchType._typemapper_scrutinee.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance), cases: _root_.scala.Seq[dotty.tools.dotc.semanticdb.MatchType.CaseType] = _root_.scala.Seq.empty - ) extends dotty.tools.dotc.semanticdb.Type.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { + ) extends dotty.tools.dotc.semanticdb.Type.NonEmpty with SemanticdbGeneratedMessage derives CanEqual: @transient @sharable private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private[this] def __computeSerializedSize(): 
_root_.scala.Int = var __size = 0 { val __value = dotty.tools.dotc.semanticdb.MatchType._typemapper_scrutinee.toBase(scrutinee) - if (__value.serializedSize != 0) { + if (__value.serializedSize != 0) __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize - } }; cases.foreach { __item => val __value = __item __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize } __size - } - override def serializedSize: _root_.scala.Int = { + override def serializedSize: _root_.scala.Int = var __size = __serializedSizeMemoized - if (__size == 0) { + if (__size == 0) __size = __computeSerializedSize() + 1 __serializedSizeMemoized = __size - } __size - 1 - } - def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { + def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { val __v = dotty.tools.dotc.semanticdb.MatchType._typemapper_scrutinee.toBase(scrutinee) - if (__v.serializedSize != 0) { + if (__v.serializedSize != 0) _output__.writeTag(1, 2) _output__.writeUInt32NoTag(__v.serializedSize) __v.writeTo(_output__) - } }; cases.foreach { __v => val __m = __v @@ -1893,7 +1702,6 @@ final case class MatchType( _output__.writeUInt32NoTag(__m.serializedSize) __m.writeTo(_output__) }; - } def withScrutinee(__v: dotty.tools.dotc.semanticdb.Type): MatchType = copy(scrutinee = __v) def clearCases = copy(cases = _root_.scala.Seq.empty) def addCases(__vs: dotty.tools.dotc.semanticdb.MatchType.CaseType *): MatchType = addAllCases(__vs) @@ -1904,30 +1712,26 @@ final case class MatchType( // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.MatchType]) -} -object MatchType extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.MatchType] { +object MatchType extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.MatchType]: implicit def messageCompanion: 
SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.MatchType] = this - def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.MatchType = { + def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.MatchType = var __scrutinee: _root_.scala.Option[dotty.tools.dotc.semanticdb.TypeMessage] = _root_.scala.None val __cases: _root_.scala.collection.immutable.VectorBuilder[dotty.tools.dotc.semanticdb.MatchType.CaseType] = new _root_.scala.collection.immutable.VectorBuilder[dotty.tools.dotc.semanticdb.MatchType.CaseType] var _done__ = false - while (!_done__) { + while (!_done__) val _tag__ = _input__.readTag() - _tag__ match { + _tag__ match case 0 => _done__ = true case 10 => __scrutinee = _root_.scala.Some(__scrutinee.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.TypeMessage](_input__))(LiteParser.readMessage(_input__, _))) case 18 => __cases += LiteParser.readMessage[dotty.tools.dotc.semanticdb.MatchType.CaseType](_input__) case tag => _input__.skipField(tag) - } - } dotty.tools.dotc.semanticdb.MatchType( scrutinee = dotty.tools.dotc.semanticdb.MatchType._typemapper_scrutinee.toCustom(__scrutinee.getOrElse(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance)), cases = __cases.result() ) - } @@ -1942,54 +1746,46 @@ object MatchType extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.s final case class CaseType( key: dotty.tools.dotc.semanticdb.Type = dotty.tools.dotc.semanticdb.MatchType.CaseType._typemapper_key.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance), body: dotty.tools.dotc.semanticdb.Type = dotty.tools.dotc.semanticdb.MatchType.CaseType._typemapper_body.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance) - ) extends SemanticdbGeneratedMessage derives CanEqual { + ) extends SemanticdbGeneratedMessage derives CanEqual: @transient @sharable private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): 
_root_.scala.Int = { + private[this] def __computeSerializedSize(): _root_.scala.Int = var __size = 0 { val __value = dotty.tools.dotc.semanticdb.MatchType.CaseType._typemapper_key.toBase(key) - if (__value.serializedSize != 0) { + if (__value.serializedSize != 0) __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize - } }; { val __value = dotty.tools.dotc.semanticdb.MatchType.CaseType._typemapper_body.toBase(body) - if (__value.serializedSize != 0) { + if (__value.serializedSize != 0) __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize - } }; __size - } - override def serializedSize: _root_.scala.Int = { + override def serializedSize: _root_.scala.Int = var __size = __serializedSizeMemoized - if (__size == 0) { + if (__size == 0) __size = __computeSerializedSize() + 1 __serializedSizeMemoized = __size - } __size - 1 - } - def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { + def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { val __v = dotty.tools.dotc.semanticdb.MatchType.CaseType._typemapper_key.toBase(key) - if (__v.serializedSize != 0) { + if (__v.serializedSize != 0) _output__.writeTag(1, 2) _output__.writeUInt32NoTag(__v.serializedSize) __v.writeTo(_output__) - } }; { val __v = dotty.tools.dotc.semanticdb.MatchType.CaseType._typemapper_body.toBase(body) - if (__v.serializedSize != 0) { + if (__v.serializedSize != 0) _output__.writeTag(2, 2) _output__.writeUInt32NoTag(__v.serializedSize) __v.writeTo(_output__) - } }; - } def withKey(__v: dotty.tools.dotc.semanticdb.Type): CaseType = copy(key = __v) def withBody(__v: dotty.tools.dotc.semanticdb.Type): CaseType = copy(body = __v) @@ -1997,30 +1793,26 @@ object MatchType extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.s // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.MatchType.CaseType]) - } - object CaseType extends 
SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.MatchType.CaseType] { + object CaseType extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.MatchType.CaseType]: implicit def messageCompanion: SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.MatchType.CaseType] = this - def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.MatchType.CaseType = { + def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.MatchType.CaseType = var __key: _root_.scala.Option[dotty.tools.dotc.semanticdb.TypeMessage] = _root_.scala.None var __body: _root_.scala.Option[dotty.tools.dotc.semanticdb.TypeMessage] = _root_.scala.None var _done__ = false - while (!_done__) { + while (!_done__) val _tag__ = _input__.readTag() - _tag__ match { + _tag__ match case 0 => _done__ = true case 10 => __key = _root_.scala.Some(__key.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.TypeMessage](_input__))(LiteParser.readMessage(_input__, _))) case 18 => __body = _root_.scala.Some(__body.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.TypeMessage](_input__))(LiteParser.readMessage(_input__, _))) case tag => _input__.skipField(tag) - } - } dotty.tools.dotc.semanticdb.MatchType.CaseType( key = dotty.tools.dotc.semanticdb.MatchType.CaseType._typemapper_key.toCustom(__key.getOrElse(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance)), body = dotty.tools.dotc.semanticdb.MatchType.CaseType._typemapper_body.toCustom(__body.getOrElse(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance)) ) - } @@ -2045,7 +1837,6 @@ object MatchType extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.s body ) // @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.MatchType.CaseType]) - } final val SCRUTINEE_FIELD_NUMBER = 1 final val CASES_FIELD_NUMBER = 2 @@ -2059,16 +1850,15 @@ object MatchType extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.s cases ) // 
@@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.MatchType]) -} @SerialVersionUID(0L) final case class LambdaType( parameters: _root_.scala.Option[dotty.tools.dotc.semanticdb.Scope] = _root_.scala.None, returnType: dotty.tools.dotc.semanticdb.Type = dotty.tools.dotc.semanticdb.LambdaType._typemapper_returnType.toCustom(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance) - ) extends dotty.tools.dotc.semanticdb.Type.NonEmpty with SemanticdbGeneratedMessage derives CanEqual { + ) extends dotty.tools.dotc.semanticdb.Type.NonEmpty with SemanticdbGeneratedMessage derives CanEqual: @transient @sharable private[this] var __serializedSizeMemoized: _root_.scala.Int = 0 - private[this] def __computeSerializedSize(): _root_.scala.Int = { + private[this] def __computeSerializedSize(): _root_.scala.Int = var __size = 0 if (parameters.isDefined) { val __value = parameters.get @@ -2077,22 +1867,18 @@ final case class LambdaType( { val __value = dotty.tools.dotc.semanticdb.LambdaType._typemapper_returnType.toBase(returnType) - if (__value.serializedSize != 0) { + if (__value.serializedSize != 0) __size += 1 + SemanticdbOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize - } }; __size - } - override def serializedSize: _root_.scala.Int = { + override def serializedSize: _root_.scala.Int = var __size = __serializedSizeMemoized - if (__size == 0) { + if (__size == 0) __size = __computeSerializedSize() + 1 __serializedSizeMemoized = __size - } __size - 1 - } - def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = { + def writeTo(`_output__`: SemanticdbOutputStream): _root_.scala.Unit = parameters.foreach { __v => val __m = __v _output__.writeTag(1, 2) @@ -2101,13 +1887,11 @@ final case class LambdaType( }; { val __v = dotty.tools.dotc.semanticdb.LambdaType._typemapper_returnType.toBase(returnType) - if (__v.serializedSize != 0) { + if (__v.serializedSize != 0) _output__.writeTag(2, 2) 
_output__.writeUInt32NoTag(__v.serializedSize) __v.writeTo(_output__) - } }; - } def getParameters: dotty.tools.dotc.semanticdb.Scope = parameters.getOrElse(dotty.tools.dotc.semanticdb.Scope.defaultInstance) def clearParameters: LambdaType = copy(parameters = _root_.scala.None) def withParameters(__v: dotty.tools.dotc.semanticdb.Scope): LambdaType = copy(parameters = Option(__v)) @@ -2117,30 +1901,26 @@ final case class LambdaType( // @@protoc_insertion_point(GeneratedMessage[dotty.tools.dotc.semanticdb.LambdaType]) -} -object LambdaType extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.LambdaType] { +object LambdaType extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.LambdaType]: implicit def messageCompanion: SemanticdbGeneratedMessageCompanion[dotty.tools.dotc.semanticdb.LambdaType] = this - def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.LambdaType = { + def parseFrom(`_input__`: SemanticdbInputStream): dotty.tools.dotc.semanticdb.LambdaType = var __parameters: _root_.scala.Option[dotty.tools.dotc.semanticdb.Scope] = _root_.scala.None var __returnType: _root_.scala.Option[dotty.tools.dotc.semanticdb.TypeMessage] = _root_.scala.None var _done__ = false - while (!_done__) { + while (!_done__) val _tag__ = _input__.readTag() - _tag__ match { + _tag__ match case 0 => _done__ = true case 10 => __parameters = Option(__parameters.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.Scope](_input__))(LiteParser.readMessage(_input__, _))) case 18 => __returnType = _root_.scala.Some(__returnType.fold(LiteParser.readMessage[dotty.tools.dotc.semanticdb.TypeMessage](_input__))(LiteParser.readMessage(_input__, _))) case tag => _input__.skipField(tag) - } - } dotty.tools.dotc.semanticdb.LambdaType( parameters = __parameters, returnType = dotty.tools.dotc.semanticdb.LambdaType._typemapper_returnType.toCustom(__returnType.getOrElse(dotty.tools.dotc.semanticdb.TypeMessage.defaultInstance)) ) - } @@ 
-2163,4 +1943,3 @@ object LambdaType extends SemanticdbGeneratedMessageCompanion[dotty.tools.dotc. returnType ) // @@protoc_insertion_point(GeneratedMessageCompanion[dotty.tools.dotc.semanticdb.LambdaType]) -} diff --git a/compiler/src/dotty/tools/dotc/semanticdb/internal/InvalidProtocolBufferException.scala b/compiler/src/dotty/tools/dotc/semanticdb/internal/InvalidProtocolBufferException.scala index 886067ed7a43..2d471e14c029 100644 --- a/compiler/src/dotty/tools/dotc/semanticdb/internal/InvalidProtocolBufferException.scala +++ b/compiler/src/dotty/tools/dotc/semanticdb/internal/InvalidProtocolBufferException.scala @@ -5,51 +5,40 @@ import java.io.IOException @SerialVersionUID(-1616151763072450476L) class InvalidProtocolBufferException(description: String) extends IOException(description) -object InvalidProtocolBufferException { +object InvalidProtocolBufferException: - def truncatedMessage(): InvalidProtocolBufferException = { + def truncatedMessage(): InvalidProtocolBufferException = new InvalidProtocolBufferException("While parsing a protocol message, the input ended unexpectedly " + "in the middle of a field. 
This could mean either that the " + "input has been truncated or that an embedded message " + "misreported its own length.") - } - def negativeSize(): InvalidProtocolBufferException = { + def negativeSize(): InvalidProtocolBufferException = new InvalidProtocolBufferException("SemanticdbInputStream encountered an embedded string or message " + "which claimed to have negative size.") - } - def malformedVarint(): InvalidProtocolBufferException = { + def malformedVarint(): InvalidProtocolBufferException = new InvalidProtocolBufferException("SemanticdbInputStream encountered a malformed varint.") - } - def invalidTag(): InvalidProtocolBufferException = { + def invalidTag(): InvalidProtocolBufferException = new InvalidProtocolBufferException("Protocol message contained an invalid tag (zero).") - } - def invalidEndTag(): InvalidProtocolBufferException = { + def invalidEndTag(): InvalidProtocolBufferException = new InvalidProtocolBufferException("Protocol message end-group tag did not match expected tag.") - } - def invalidWireType(): InvalidProtocolBufferException = { + def invalidWireType(): InvalidProtocolBufferException = new InvalidProtocolBufferException("Protocol message tag had invalid wire type.") - } - def recursionLimitExceeded(): InvalidProtocolBufferException = { + def recursionLimitExceeded(): InvalidProtocolBufferException = new InvalidProtocolBufferException("Protocol message had too many levels of nesting. May be malicious. " + "Use SemanticdbInputStream.setRecursionLimit() to increase the depth limit.") - } - def sizeLimitExceeded(): InvalidProtocolBufferException = { + def sizeLimitExceeded(): InvalidProtocolBufferException = new InvalidProtocolBufferException("Protocol message was too large. May be malicious. 
" + "Use SemanticdbInputStream.setSizeLimit() to increase the size limit.") - } - def parseFailure(): InvalidProtocolBufferException = { + def parseFailure(): InvalidProtocolBufferException = new InvalidProtocolBufferException("Failed to parse the message.") - } - def invalidUtf8(): InvalidProtocolBufferException = { + def invalidUtf8(): InvalidProtocolBufferException = new InvalidProtocolBufferException("Protocol message had invalid UTF-8.") - } -} diff --git a/compiler/src/dotty/tools/dotc/semanticdb/internal/LiteParser.scala b/compiler/src/dotty/tools/dotc/semanticdb/internal/LiteParser.scala index 1000fc6601da..69bf8bf9e881 100644 --- a/compiler/src/dotty/tools/dotc/semanticdb/internal/LiteParser.scala +++ b/compiler/src/dotty/tools/dotc/semanticdb/internal/LiteParser.scala @@ -1,36 +1,33 @@ package dotty.tools.dotc.semanticdb.internal -object LiteParser { +object LiteParser: def readMessage[A <: SemanticdbGeneratedMessage]( input: SemanticdbInputStream, message: A )(implicit cmp: SemanticdbGeneratedMessageCompanion[A] - ): A = { + ): A = val length = input.readRawVarint32() val oldLimit = input.pushLimit(length) val result: A = cmp.merge(message, input) input.checkLastTagWas(0) input.popLimit(oldLimit) result - } def readMessage[A <: SemanticdbGeneratedMessage]( input: SemanticdbInputStream )(implicit cmp: SemanticdbGeneratedMessageCompanion[A] - ): A = { + ): A = val length = input.readRawVarint32() val oldLimit = input.pushLimit(length) val result: A = cmp.parseFrom(input) input.checkLastTagWas(0) input.popLimit(oldLimit) result - } @inline def preferredSemanticdbOutputStreamBufferSize(dataLength: Int) = dataLength min SemanticdbOutputStream.DEFAULT_BUFFER_SIZE -} diff --git a/compiler/src/dotty/tools/dotc/semanticdb/internal/MD5.scala b/compiler/src/dotty/tools/dotc/semanticdb/internal/MD5.scala index dc10df6a5bba..5f31a85fd755 100644 --- a/compiler/src/dotty/tools/dotc/semanticdb/internal/MD5.scala +++ 
b/compiler/src/dotty/tools/dotc/semanticdb/internal/MD5.scala @@ -6,25 +6,20 @@ import java.nio.ByteBuffer import java.nio.charset.StandardCharsets import java.security.MessageDigest -object MD5 { - def compute(string: String): String = { +object MD5: + def compute(string: String): String = compute(ByteBuffer.wrap(string.getBytes(StandardCharsets.UTF_8))) - } - def compute(buffer: ByteBuffer): String = { + def compute(buffer: ByteBuffer): String = val md = MessageDigest.getInstance("MD5") md.update(buffer) bytesToHex(md.digest()) - } private val hexArray = "0123456789ABCDEF".toCharArray - def bytesToHex(bytes: Array[Byte]): String = { + def bytesToHex(bytes: Array[Byte]): String = val hexChars = new Array[Char](bytes.length * 2) var j = 0 - while (j < bytes.length) { + while (j < bytes.length) val v: Int = bytes(j) & 0xFF hexChars(j * 2) = hexArray(v >>> 4) hexChars(j * 2 + 1) = hexArray(v & 0x0F) j += 1 - } new String(hexChars) - } -} diff --git a/compiler/src/dotty/tools/dotc/semanticdb/internal/SemanticdbEnum.scala b/compiler/src/dotty/tools/dotc/semanticdb/internal/SemanticdbEnum.scala index 18b8edf8dab0..f64fd0da4246 100644 --- a/compiler/src/dotty/tools/dotc/semanticdb/internal/SemanticdbEnum.scala +++ b/compiler/src/dotty/tools/dotc/semanticdb/internal/SemanticdbEnum.scala @@ -1,5 +1,4 @@ package dotty.tools.dotc.semanticdb.internal -trait SemanticdbEnum { +trait SemanticdbEnum: def value: Int -} diff --git a/compiler/src/dotty/tools/dotc/semanticdb/internal/SemanticdbGeneratedMessageCompanion.scala b/compiler/src/dotty/tools/dotc/semanticdb/internal/SemanticdbGeneratedMessageCompanion.scala index bad0ff9b8fc8..65288f47caae 100644 --- a/compiler/src/dotty/tools/dotc/semanticdb/internal/SemanticdbGeneratedMessageCompanion.scala +++ b/compiler/src/dotty/tools/dotc/semanticdb/internal/SemanticdbGeneratedMessageCompanion.scala @@ -2,42 +2,38 @@ package dotty.tools.dotc.semanticdb.internal // 
https://github.com/scalapb/ScalaPB/blob/194463272125b872b99d4902b7712355a53e96c4/scalapb-runtime/src/main/scala/scalapb/GeneratedMessageCompanion.scala#L61-L68 -trait SemanticdbGeneratedOneof extends Any with Product with Serializable { +trait SemanticdbGeneratedOneof extends Any with Product with Serializable: type ValueType def number: Int def isDefined: Boolean def isEmpty: Boolean def value: ValueType def valueOption: Option[ValueType] = if (isDefined) Some(value) else None -} // https://github.com/scalapb/ScalaPB/blob/194463272125b872b99d4902b7712355a53e96c4/scalapb-runtime/src/main/scala/scalapb/GeneratedMessageCompanion.scala#L72-L138 -trait SemanticdbGeneratedMessage extends Any with Product with Serializable { +trait SemanticdbGeneratedMessage extends Any with Product with Serializable: def serializedSize: Int def writeTo(output: SemanticdbOutputStream): Unit /** Serializes the messgae and returns a byte array containing its raw bytes */ - final def toByteArray: Array[Byte] = { + final def toByteArray: Array[Byte] = val a = new Array[Byte](serializedSize) val outputStream = SemanticdbOutputStream.newInstance(a) writeTo(outputStream) outputStream.checkNoSpaceLeft() a - } -} trait SemanticdbGeneratedSealedOneof extends Any with Product - with Serializable { + with Serializable: type MessageType <: SemanticdbGeneratedMessage def isEmpty: Boolean def isDefined: Boolean def asMessage: MessageType -} -trait SemanticdbGeneratedEnum extends Any with Product with Serializable { +trait SemanticdbGeneratedEnum extends Any with Product with Serializable: type EnumType <: SemanticdbGeneratedEnum def value: Int @@ -50,18 +46,16 @@ trait SemanticdbGeneratedEnum extends Any with Product with Serializable { def isUnrecognized: Boolean = false -} -trait SemanticdbUnrecognizedEnum extends SemanticdbGeneratedEnum { +trait SemanticdbUnrecognizedEnum extends SemanticdbGeneratedEnum: def name = "UNRECOGNIZED" def index = -1 override def isUnrecognized: 
Boolean = true -} trait SemanticdbGeneratedMessageCompanion[A <: SemanticdbGeneratedMessage] - extends Serializable { + extends Serializable: self => type ValueType = A @@ -73,7 +67,5 @@ trait SemanticdbGeneratedMessageCompanion[A <: SemanticdbGeneratedMessage] ) /** Merges the given message with the additional fields in the steam. */ - def merge(a: A, input: SemanticdbInputStream): A = { + def merge(a: A, input: SemanticdbInputStream): A = parseFrom(a.toByteArray ++ parseFrom(input).toByteArray) - } -} diff --git a/compiler/src/dotty/tools/dotc/semanticdb/internal/SemanticdbInputStream.scala b/compiler/src/dotty/tools/dotc/semanticdb/internal/SemanticdbInputStream.scala index 823085d55d92..946881b22ef6 100644 --- a/compiler/src/dotty/tools/dotc/semanticdb/internal/SemanticdbInputStream.scala +++ b/compiler/src/dotty/tools/dotc/semanticdb/internal/SemanticdbInputStream.scala @@ -11,56 +11,45 @@ import SemanticdbInputStream._ import scala.collection.mutable -object SemanticdbInputStream { +object SemanticdbInputStream: def newInstance(input: InputStream): SemanticdbInputStream = new SemanticdbInputStream(input) def newInstance(buf: Array[Byte]): SemanticdbInputStream = newInstance(buf, 0, buf.length) - def newInstance(buf: Array[Byte], off: Int, len: Int): SemanticdbInputStream = { + def newInstance(buf: Array[Byte], off: Int, len: Int): SemanticdbInputStream = val result = new SemanticdbInputStream(buf, off, len) result.pushLimit(len) result - } @throws[InvalidProtocolBufferException] - def readRawVarint32(input: InputStream): Int = { + def readRawVarint32(input: InputStream): Int = val firstByte = input.read() - if (firstByte == -1) { + if (firstByte == -1) throw InvalidProtocolBufferException.truncatedMessage() - } readRawVarint32(firstByte, input) - } - def readRawVarint32(firstByte: Int, input: InputStream): Int = { - if ((firstByte & 0x80) == 0) { + def readRawVarint32(firstByte: Int, input: InputStream): Int = + if ((firstByte & 0x80) == 0) return firstByte - 
} var result = firstByte & 0x7f var offset = 7 - while (offset < 32) { + while (offset < 32) val b = input.read() - if (b == -1) { + if (b == -1) throw InvalidProtocolBufferException.truncatedMessage() - } result |= (b & 0x7f) << offset - if ((b & 0x80) == 0) { + if ((b & 0x80) == 0) return result - } offset += 7 - } - while (offset < 64) { + while (offset < 64) val b = input.read() - if (b == -1) { + if (b == -1) throw InvalidProtocolBufferException.truncatedMessage() - } - if ((b & 0x80) == 0) { + if ((b & 0x80) == 0) return result - } offset += 7 - } throw InvalidProtocolBufferException.malformedVarint() - } def decodeZigZag32(n: Int): Int = (n >>> 1) ^ -(n & 1) @@ -71,9 +60,8 @@ object SemanticdbInputStream { private val DEFAULT_SIZE_LIMIT = 64 << 20 private val BUFFER_SIZE = 4096 -} -class SemanticdbInputStream private (buffer: Array[Byte], input: InputStream) { +class SemanticdbInputStream private (buffer: Array[Byte], input: InputStream): /** * The total number of bytes read before the current buffer. The total * bytes read up to the current position can be computed as @@ -97,39 +85,33 @@ class SemanticdbInputStream private (buffer: Array[Byte], input: InputStream) { private var lastTag = 0 - def this(buffer: Array[Byte], offset: Int, len: Int) = { + def this(buffer: Array[Byte], offset: Int, len: Int) = this(buffer, null) bufferPos = offset bufferSize = offset + len totalBytesRetired = -offset - } - def this(is: InputStream) = { + def this(is: InputStream) = this(new Array[Byte](SemanticdbInputStream.BUFFER_SIZE), is) totalBytesRetired = 0 - } /** * Ensures that at least {@code n} bytes are available in the buffer, reading * more bytes from the input if necessary to make it so. Caller must ensure * that the requested space is less than BUFFER_SIZE. 
*/ - private def ensureAvailable(n: Int): Unit = { - if (bufferSize - bufferPos < n) { + private def ensureAvailable(n: Int): Unit = + if (bufferSize - bufferPos < n) refillBuffer(n) - } - } /** * Reads more bytes from the input, making at least {@code n} bytes available * in the buffer. Caller must ensure that the requested space is not yet * available, and that the requested space is less than BUFFER_SIZE. */ - private def refillBuffer(n: Int): Unit = { - if (!tryRefillBuffer(n)) { + private def refillBuffer(n: Int): Unit = + if (!tryRefillBuffer(n)) throw InvalidProtocolBufferException.truncatedMessage() - } - } /** * Tries to read more bytes from the input, making at least {@code n} bytes * available in the buffer. Caller must ensure that the requested space is @@ -138,60 +120,48 @@ class SemanticdbInputStream private (buffer: Array[Byte], input: InputStream) { * @return { @code true} if the bytes could be made available; { @code false} * if the end of the stream or the current limit was reached. 
*/ - private def tryRefillBuffer(n: Int): Boolean = { - if (bufferPos + n <= bufferSize) { + private def tryRefillBuffer(n: Int): Boolean = + if (bufferPos + n <= bufferSize) throw new IllegalStateException( s"refillBuffer() called when $n bytes were already available in buffer") - } if totalBytesRetired + bufferPos + n <= currentLimit && input != null then val pos: Int = bufferPos - if (pos > 0) { - if (bufferSize > pos) { + if (pos > 0) + if (bufferSize > pos) System.arraycopy(buffer, pos, buffer, 0, bufferSize - pos) - } totalBytesRetired += pos bufferSize -= pos bufferPos = 0 - } val bytesRead: Int = input.read(buffer, bufferSize, buffer.length - bufferSize) - if (bytesRead == 0 || bytesRead < -1 || bytesRead > buffer.length) { + if (bytesRead == 0 || bytesRead < -1 || bytesRead > buffer.length) throw new IllegalStateException("InputStream#read(byte[]) returned invalid result: " + bytesRead + "\nThe InputStream implementation is buggy.") - } - if (bytesRead > 0) { + if (bytesRead > 0) bufferSize += bytesRead - if (totalBytesRetired + n - sizeLimit > 0) { + if (totalBytesRetired + n - sizeLimit > 0) throw InvalidProtocolBufferException.sizeLimitExceeded() - } recomputeBufferSizeAfterLimit() return ((bufferSize >= n) || tryRefillBuffer(n)) - } false - } - private def recomputeBufferSizeAfterLimit(): Unit = { + private def recomputeBufferSizeAfterLimit(): Unit = bufferSize += bufferSizeAfterLimit val bufferEnd: Int = totalBytesRetired + bufferSize - if (bufferEnd > currentLimit) { + if (bufferEnd > currentLimit) bufferSizeAfterLimit = bufferEnd - currentLimit bufferSize -= bufferSizeAfterLimit - } - else { + else bufferSizeAfterLimit = 0 - } - } /** * Returns true if the stream has reached the end of the input. This is the * case if either the end of the underlying input source has been reached or * if the stream has reached a limit created using {@link #pushLimit(int)}. 
*/ - def isAtEnd: Boolean = { + def isAtEnd: Boolean = bufferPos == bufferSize && !tryRefillBuffer(1) - } - def getTotalBytesRead() = { + def getTotalBytesRead() = totalBytesRetired + bufferPos - } /** * Sets {@code currentLimit} to (current position) + {@code byteLimit}. This @@ -207,29 +177,25 @@ class SemanticdbInputStream private (buffer: Array[Byte], input: InputStream) { * * @return the old limit. */ - def pushLimit(byteLimit0: Int): Int = { - if (byteLimit0 < 0) { + def pushLimit(byteLimit0: Int): Int = + if (byteLimit0 < 0) throw InvalidProtocolBufferException.negativeSize() - } val byteLimit = byteLimit0 + totalBytesRetired + bufferPos val oldLimit: Int = currentLimit - if (byteLimit > oldLimit) { + if (byteLimit > oldLimit) throw InvalidProtocolBufferException.truncatedMessage() - } currentLimit = byteLimit recomputeBufferSizeAfterLimit() oldLimit - } /** * Discards the current limit, returning to the previous limit. * * @param oldLimit The old limit, as returned by { @code pushLimit}. */ - def popLimit(oldLimit: Int): Unit = { + def popLimit(oldLimit: Int): Unit = currentLimit = oldLimit recomputeBufferSizeAfterLimit() - } /** * Reads and discards a single field, given its tag value. @@ -238,8 +204,8 @@ class SemanticdbInputStream private (buffer: Array[Byte], input: InputStream) { * nothing is skipped. Otherwise, returns { @code true}. */ @throws(classOf[IOException]) - def skipField(tag: Int): Boolean = { - WireFormat.getTagWireType(tag) match { + def skipField(tag: Int): Boolean = + WireFormat.getTagWireType(tag) match case WireFormat.WIRETYPE_VARINT => skipRawVarint() true @@ -260,80 +226,66 @@ class SemanticdbInputStream private (buffer: Array[Byte], input: InputStream) { true case _ => throw InvalidProtocolBufferException.invalidWireType() - } - } /** * Reads and discards an entire message. This will read either until EOF * or until an endgroup tag, whichever comes first. 
*/ - def skipMessage(): Unit = { - while (true) { + def skipMessage(): Unit = + while (true) val tag: Int = readTag() - if (tag == 0 || !skipField(tag)) { + if (tag == 0 || !skipField(tag)) return - } - } - } /** * Reads and discards {@code size} bytes. */ - def skipRawBytes(size: Int): Unit = { - if (size <= (bufferSize - bufferPos) && size >= 0) { + def skipRawBytes(size: Int): Unit = + if (size <= (bufferSize - bufferPos) && size >= 0) bufferPos += size - } - else { + else skipRawBytesSlowPath(size) - } - } /** * Read a raw Varint from the stream. If larger than 32 bits, discard the * upper bits. */ @throws[InvalidProtocolBufferException] - def readRawVarint32(): Int = { + def readRawVarint32(): Int = { var pos: Int = bufferPos - if (bufferSize == pos) { + if (bufferSize == pos) return readRawVarint64SlowPath().toInt - } val buffer: Array[Byte] = this.buffer var x: Int = 0 if ((({ x = buffer(({ pos += 1; pos - 1 })); x - })) >= 0) { + })) >= 0) bufferPos = pos return x - } - else if (bufferSize - pos < 9) { + else if (bufferSize - pos < 9) return readRawVarint64SlowPath().toInt - } else if ((({ x ^= (buffer(({ pos += 1; pos - 1 })) << 7); x - })) < 0) { + })) < 0) x ^= (~0 << 7) - } else if ((({ x ^= (buffer(({ pos += 1; pos - 1 })) << 14); x - })) >= 0) { + })) >= 0) x ^= (~0 << 7) ^ (~0 << 14) - } else if ((({ x ^= (buffer(({ pos += 1; pos - 1 })) << 21); x - })) < 0) { + })) < 0) x ^= (~0 << 7) ^ (~0 << 14) ^ (~0 << 21) - } - else { + else val y: Int = buffer(({ pos += 1; pos - 1 })) @@ -349,67 +301,53 @@ class SemanticdbInputStream private (buffer: Array[Byte], input: InputStream) { pos += 1; pos - 1 })) < 0 && buffer(({ pos += 1; pos - 1 - })) < 0) { + })) < 0) return readRawVarint64SlowPath().toInt - } - } bufferPos = pos return x } //todo: labels is not supported - } - private def skipRawVarint(): Unit = { - if (bufferSize - bufferPos >= 10) { + private def skipRawVarint(): Unit = + if (bufferSize - bufferPos >= 10) val buffer: Array[Byte] = this.buffer 
var pos: Int = bufferPos var i: Int = 0 - while (i < 10) { + while (i < 10) if (buffer(({ pos += 1; pos - 1 - })) >= 0) { + })) >= 0) bufferPos = pos return - } i += 1; i - 1 - } - } skipRawVarintSlowPath - } @throws(classOf[IOException]) - private def skipRawVarintSlowPath: Unit = { + private def skipRawVarintSlowPath: Unit = var i: Int = 0 - while (i < 10) { - if (readRawByte() >= 0) { + while (i < 10) + if (readRawByte() >= 0) return - } i += 1; i - 1 - } throw InvalidProtocolBufferException.malformedVarint() - } /** * Exactly like skipRawBytes, but caller must have already checked the fast * path: (size <= (bufferSize - pos) && size >= 0) */ - private def skipRawBytesSlowPath(size: Int): Unit = { - if (size < 0) { + private def skipRawBytesSlowPath(size: Int): Unit = + if (size < 0) throw InvalidProtocolBufferException.negativeSize() - } - if (totalBytesRetired + bufferPos + size > currentLimit) { + if (totalBytesRetired + bufferPos + size > currentLimit) skipRawBytes(currentLimit - totalBytesRetired - bufferPos) throw InvalidProtocolBufferException.truncatedMessage() - } var pos: Int = bufferSize - bufferPos bufferPos = bufferSize refillBuffer(1) - while (size - pos > bufferSize) { + while (size - pos > bufferSize) pos += bufferSize bufferPos = bufferSize refillBuffer(1) - } bufferPos = size - pos - } /** * Attempt to read a field tag, returning zero if we have reached EOF. @@ -417,151 +355,123 @@ class SemanticdbInputStream private (buffer: Array[Byte], input: InputStream) { * may legally end wherever a tag occurs, and zero is not a valid tag number. 
*/ @throws[InvalidProtocolBufferException] - def readTag(): Int = { - if (isAtEnd) { + def readTag(): Int = + if (isAtEnd) lastTag = 0 return 0 - } lastTag = readRawVarint32() - if (WireFormat.getTagFieldNumber(lastTag) == 0) { + if (WireFormat.getTagFieldNumber(lastTag) == 0) throw InvalidProtocolBufferException.invalidTag() - } lastTag - } - def readString(): String = { + def readString(): String = val size: Int = readRawVarint32() - if (size <= (bufferSize - bufferPos) && size > 0) { + if (size <= (bufferSize - bufferPos) && size > 0) val result: String = new String(buffer, bufferPos, size, StandardCharsets.UTF_8) bufferPos += size return result - } - else if (size == 0) { + else if (size == 0) return "" - } - else { + else return new String(readRawBytesSlowPath(size), StandardCharsets.UTF_8) - } - } - def readStringRequireUtf8(): String = { + def readStringRequireUtf8(): String = val size: Int = readRawVarint32() var bytes: Array[Byte] = Array() var pos = bufferPos; - if (size <= (bufferSize - pos) && size > 0) { + if (size <= (bufferSize - pos) && size > 0) // Fast path: We already have the bytes in a contiguous buffer, so // just copy directly from it. bytes = buffer; bufferPos = pos + size; - } else if (size == 0) { + else if (size == 0) return ""; - } else { + else // Slow path: Build a byte array first then copy it. bytes = readRawBytesSlowPath(size); pos = 0; - } // TODO(martinrb): We could save a pass by validating while decoding. 
// if (!Utf8.isValidUtf8(bytes, pos, pos + size)) { // throw InvalidProtocolBufferException.invalidUtf8(); // } return new String(bytes, pos, size, "UTF-8"); - } - def checkLastTagWas(value: Int): Unit = { - if (lastTag != value) { + def checkLastTagWas(value: Int): Unit = + if (lastTag != value) throw InvalidProtocolBufferException.invalidEndTag(); - } - } - def getBytesUntilLimit: Int = { - if (currentLimit == Integer.MAX_VALUE) { + def getBytesUntilLimit: Int = + if (currentLimit == Integer.MAX_VALUE) return -1 - } val currentAbsolutePosition: Int = totalBytesRetired + bufferPos return currentLimit - currentAbsolutePosition - } /** Read a {@code double} field value from the stream. */ - def readDouble(): Double = { + def readDouble(): Double = return java.lang.Double.longBitsToDouble(readRawLittleEndian64()) - } /** Read a {@code float} field value from the stream. */ - def readFloat(): Float = { + def readFloat(): Float = java.lang.Float.intBitsToFloat(readRawLittleEndian32()) - } /** Read a {@code uint64} field value from the stream. */ - def readUInt64(): Long = { + def readUInt64(): Long = readRawVarint64() - } /** Read an {@code int64} field value from the stream. */ - def readInt64(): Long = { + def readInt64(): Long = readRawVarint64() - } /** Read an {@code int32} field value from the stream. */ - def readInt32(): Int = { + def readInt32(): Int = readRawVarint32() - } /** Read a {@code fixed64} field value from the stream. */ - def readFixed64(): Long = { + def readFixed64(): Long = readRawLittleEndian64() - } /** Read a {@code fixed32} field value from the stream. */ - def readFixed32(): Int = { + def readFixed32(): Int = readRawLittleEndian32() - } /** Read a {@code uint32} field value from the stream. */ - def readUInt32(): Int = { + def readUInt32(): Int = readRawVarint32() - } /** * Read an enum field value from the stream. Caller is responsible * for converting the numeric value to an actual enum. 
*/ - def readEnum(): Int = { + def readEnum(): Int = readRawVarint32() - } /** Read an {@code sfixed32} field value from the stream. */ - def readSFixed32(): Int = { + def readSFixed32(): Int = readRawLittleEndian32() - } /** Read an {@code sfixed64} field value from the stream. */ - def readSFixed64(): Long = { + def readSFixed64(): Long = readRawLittleEndian64() - } /** Read an {@code sint32} field value from the stream. */ - def readSInt32(): Int = { + def readSInt32(): Int = decodeZigZag32(readRawVarint32()) - } /** Read an {@code sint64} field value from the stream. */ - def readSInt64(): Long = { + def readSInt64(): Long = decodeZigZag64(readRawVarint64()) - } /** Read a {@code bool} field value from the stream. */ - def readBool(): Boolean = { + def readBool(): Boolean = readRawVarint64() != 0 - } /** Read a raw Varint from the stream. */ @throws[InvalidProtocolBufferException] - def readRawVarint64(): Long = { + def readRawVarint64(): Long = var pos: Int = bufferPos - if (bufferSize == pos) { + if (bufferSize == pos) return readRawVarint64SlowPath() - } val buffer: Array[Byte] = this.buffer var x: Long = 0L var y: Int = 0 @@ -569,117 +479,98 @@ class SemanticdbInputStream private (buffer: Array[Byte], input: InputStream) { y = buffer(({ pos += 1; pos - 1 })); y - })) >= 0) { + })) >= 0) bufferPos = pos return y - } - else if (bufferSize - pos < 9) { + else if (bufferSize - pos < 9) return readRawVarint64SlowPath() - } else if ((({ y ^= (buffer(({ pos += 1; pos - 1 })) << 7); y - })) < 0) { + })) < 0) x = y ^ (~0 << 7) - } else if ((({ y ^= (buffer(({ pos += 1; pos - 1 })) << 14); y - })) >= 0) { + })) >= 0) x = y ^ ((~0 << 7) ^ (~0 << 14)) - } else if ((({ y ^= (buffer(({ pos += 1; pos - 1 })) << 21); y - })) < 0) { + })) < 0) x = y ^ ((~0 << 7) ^ (~0 << 14) ^ (~0 << 21)) - } else if ((({ x = (y.toLong) ^ (buffer(({ pos += 1; pos - 1 })).toLong << 28); x - })) >= 0L) { + })) >= 0L) x ^= (~0L << 7) ^ (~0L << 14) ^ (~0L << 21) ^ (~0L << 28) - } else if ((({ x 
^= (buffer(({ pos += 1; pos - 1 })).toLong << 35); x - })) < 0L) { + })) < 0L) x ^= (~0L << 7) ^ (~0L << 14) ^ (~0L << 21) ^ (~0L << 28) ^ (~0L << 35) - } else if ((({ x ^= (buffer(({ pos += 1; pos - 1 })).toLong << 42); x - })) >= 0L) { + })) >= 0L) x ^= (~0L << 7) ^ (~0L << 14) ^ (~0L << 21) ^ (~0L << 28) ^ (~0L << 35) ^ (~0L << 42) - } else if ((({ x ^= (buffer(({ pos += 1; pos - 1 })).toLong << 49); x - })) < 0L) { + })) < 0L) x ^= (~0L << 7) ^ (~0L << 14) ^ (~0L << 21) ^ (~0L << 28) ^ (~0L << 35) ^ (~0L << 42) ^ (~0L << 49) - } - else { + else x ^= (buffer(({ pos += 1; pos - 1 })).toLong << 56) x ^= (~0L << 7) ^ (~0L << 14) ^ (~0L << 21) ^ (~0L << 28) ^ (~0L << 35) ^ (~0L << 42) ^ (~0L << 49) ^ (~0L << 56) - if (x < 0L) { + if (x < 0L) if (buffer(({ pos += 1; pos - 1 - })) < 0L) { + })) < 0L) return readRawVarint64SlowPath() - } - } - } bufferPos = pos x - } /** Variant of readRawVarint64 for when uncomfortably close to the limit. */ @throws[InvalidProtocolBufferException] - private[semanticdb] def readRawVarint64SlowPath(): Long = { + private[semanticdb] def readRawVarint64SlowPath(): Long = var result: Long = 0 var shift: Int = 0 - while (shift < 64) { + while (shift < 64) val b: Byte = readRawByte() result |= (b & 0x7F).toLong << shift - if ((b & 0x80) == 0) { + if ((b & 0x80) == 0) return result - } shift += 7 - } throw InvalidProtocolBufferException.malformedVarint() - } /** Read a 32-bit little-endian integer from the stream. */ - def readRawLittleEndian32(): Int = { + def readRawLittleEndian32(): Int = var pos: Int = bufferPos - if (bufferSize - pos < 4) { + if (bufferSize - pos < 4) refillBuffer(4) pos = bufferPos - } val buffer: Array[Byte] = this.buffer bufferPos = pos + 4 (((buffer(pos) & 0xff)) | ((buffer(pos + 1) & 0xff) << 8) | ((buffer(pos + 2) & 0xff) << 16) | ((buffer(pos + 3) & 0xff) << 24)) - } /** Read a 64-bit little-endian integer from the stream. 
*/ - def readRawLittleEndian64(): Long = { + def readRawLittleEndian64(): Long = var pos: Int = bufferPos - if (bufferSize - pos < 8) { + if (bufferSize - pos < 8) refillBuffer(8) pos = bufferPos - } val buffer: Array[Byte] = this.buffer bufferPos = pos + 8 (((buffer(pos).toLong & 0xffL)) | @@ -690,54 +581,44 @@ class SemanticdbInputStream private (buffer: Array[Byte], input: InputStream) { ((buffer(pos + 5).toLong & 0xffL) << 40) | ((buffer(pos + 6).toLong & 0xffL) << 48) | ((buffer(pos + 7).toLong & 0xffL) << 56)) - } /** * Read one byte from the input. */ @throws[InvalidProtocolBufferException] - def readRawByte(): Byte = { - if (bufferPos == bufferSize) { + def readRawByte(): Byte = + if (bufferPos == bufferSize) refillBuffer(1) - } buffer({ bufferPos += 1; bufferPos - 1 }) - } /** * Read a fixed size of bytes from the input. */ @throws[InvalidProtocolBufferException] - def readRawBytes(size: Int): Array[Byte] = { + def readRawBytes(size: Int): Array[Byte] = val pos: Int = bufferPos - if (size <= (bufferSize - pos) && size > 0) { + if (size <= (bufferSize - pos) && size > 0) bufferPos = pos + size Arrays.copyOfRange(buffer, pos, pos + size) - } - else { + else readRawBytesSlowPath(size) - } - } /** * Exactly like readRawBytes, but caller must have already checked the fast * path: (size <= (bufferSize - pos) && size > 0) */ - private def readRawBytesSlowPath(size: Int): Array[Byte] = { - if (size <= 0) { - if (size == 0) { + private def readRawBytesSlowPath(size: Int): Array[Byte] = + if (size <= 0) + if (size == 0) return Internal.EMPTY_BYTE_ARRAY - } - else { + else throw InvalidProtocolBufferException.negativeSize() - } - } - if (totalBytesRetired + bufferPos + size > currentLimit) { + if (totalBytesRetired + bufferPos + size > currentLimit) skipRawBytes(currentLimit - totalBytesRetired - bufferPos) throw InvalidProtocolBufferException.truncatedMessage() - } - if (size < BUFFER_SIZE) { + if (size < BUFFER_SIZE) val bytes: Array[Byte] = new Array[Byte](size) 
val pos: Int = bufferSize - bufferPos System.arraycopy(buffer, bufferPos, bytes, 0, pos) @@ -746,8 +627,7 @@ class SemanticdbInputStream private (buffer: Array[Byte], input: InputStream) { System.arraycopy(buffer, 0, bytes, pos, size - pos) bufferPos = size - pos bytes - } - else { + else val originalBufferPos: Int = bufferPos val originalBufferSize: Int = bufferSize totalBytesRetired += bufferSize @@ -755,44 +635,34 @@ class SemanticdbInputStream private (buffer: Array[Byte], input: InputStream) { bufferSize = 0 var sizeLeft: Int = size - (originalBufferSize - originalBufferPos) val chunks: mutable.ArrayBuffer[Array[Byte]] = new mutable.ArrayBuffer[Array[Byte]] - while (sizeLeft > 0) { + while (sizeLeft > 0) val chunk: Array[Byte] = new Array[Byte](Math.min(sizeLeft, BUFFER_SIZE)) var pos: Int = 0 - while (pos < chunk.length) { + while (pos < chunk.length) val n: Int = if ((input == null)) -1 else input.read(chunk, pos, chunk.length - pos) - if (n == -1) { + if (n == -1) throw InvalidProtocolBufferException.truncatedMessage() - } totalBytesRetired += n pos += n - } sizeLeft -= chunk.length chunks+=(chunk) - } val bytes: Array[Byte] = new Array[Byte](size) var pos: Int = originalBufferSize - originalBufferPos System.arraycopy(buffer, originalBufferPos, bytes, 0, pos) - for (chunk <- chunks) { + for (chunk <- chunks) System.arraycopy(chunk, 0, bytes, pos, chunk.length) pos += chunk.length - } bytes - } - } def enableAliasing(aliasing: Boolean): Unit = {} - def setSizeLimit(limit: Int): Int = { - if (limit < 0) { + def setSizeLimit(limit: Int): Int = + if (limit < 0) throw new IllegalArgumentException( "Size limit cannot be negative: " + limit) - } val oldLimit: Int = sizeLimit sizeLimit = limit oldLimit - } - def resetSizeCounter(): Unit = { + def resetSizeCounter(): Unit = totalBytesRetired = -bufferPos; - } -} diff --git a/compiler/src/dotty/tools/dotc/semanticdb/internal/SemanticdbMessage.scala 
b/compiler/src/dotty/tools/dotc/semanticdb/internal/SemanticdbMessage.scala index e20d1691a05d..1d3ba26035df 100644 --- a/compiler/src/dotty/tools/dotc/semanticdb/internal/SemanticdbMessage.scala +++ b/compiler/src/dotty/tools/dotc/semanticdb/internal/SemanticdbMessage.scala @@ -1,7 +1,6 @@ package dotty.tools.dotc.semanticdb.internal -trait SemanticdbMessage[A] { +trait SemanticdbMessage[A]: def serializedSize: Int def writeTo(out: SemanticdbOutputStream): Unit def mergeFrom(in: SemanticdbInputStream): A -} diff --git a/compiler/src/dotty/tools/dotc/semanticdb/internal/SemanticdbOutputStream.scala b/compiler/src/dotty/tools/dotc/semanticdb/internal/SemanticdbOutputStream.scala index 37da7f868e25..03befb6b2d0c 100644 --- a/compiler/src/dotty/tools/dotc/semanticdb/internal/SemanticdbOutputStream.scala +++ b/compiler/src/dotty/tools/dotc/semanticdb/internal/SemanticdbOutputStream.scala @@ -9,121 +9,95 @@ import java.nio.charset.StandardCharsets import SemanticdbOutputStream._ -object SemanticdbOutputStream { +object SemanticdbOutputStream: val DEFAULT_BUFFER_SIZE = 4096 - def computePreferredBufferSize(dataLength: Int): Int = { + def computePreferredBufferSize(dataLength: Int): Int = if (dataLength > DEFAULT_BUFFER_SIZE) return DEFAULT_BUFFER_SIZE dataLength - } - def newInstance(output: OutputStream): SemanticdbOutputStream = { + def newInstance(output: OutputStream): SemanticdbOutputStream = newInstance(output, DEFAULT_BUFFER_SIZE) - } - def newInstance(output: OutputStream, bufferSize: Int): SemanticdbOutputStream = { + def newInstance(output: OutputStream, bufferSize: Int): SemanticdbOutputStream = new SemanticdbOutputStream(output, Array.ofDim[Byte](bufferSize)) - } - def newInstance(byteBuffer: ByteBuffer): SemanticdbOutputStream = { + def newInstance(byteBuffer: ByteBuffer): SemanticdbOutputStream = newInstance(byteBuffer, DEFAULT_BUFFER_SIZE) - } - def newInstance(b: Array[Byte]): SemanticdbOutputStream = { + def newInstance(b: Array[Byte]): 
SemanticdbOutputStream = new SemanticdbOutputStream(null, b) - } def newInstance( byteBuffer: ByteBuffer, bufferSize: Int - ): SemanticdbOutputStream = { + ): SemanticdbOutputStream = newInstance(new ByteBufferOutputStream(byteBuffer), bufferSize) - } private class ByteBufferOutputStream(private val byteBuffer: ByteBuffer) - extends OutputStream { + extends OutputStream: - override def write(b: Int): Unit = { + override def write(b: Int): Unit = byteBuffer.put(b.toByte) - } - override def write(data: Array[Byte], offset: Int, length: Int): Unit = { + override def write(data: Array[Byte], offset: Int, length: Int): Unit = byteBuffer.put(data, offset, length) - } - } - def computeDoubleSize(fieldNumber: Int, value: Double): Int = { + def computeDoubleSize(fieldNumber: Int, value: Double): Int = computeTagSize(fieldNumber) + computeDoubleSizeNoTag(value) - } - def computeFloatSize(fieldNumber: Int, value: Float): Int = { + def computeFloatSize(fieldNumber: Int, value: Float): Int = computeTagSize(fieldNumber) + computeFloatSizeNoTag(value) - } - def computeUInt64Size(fieldNumber: Int, value: Long): Int = { + def computeUInt64Size(fieldNumber: Int, value: Long): Int = computeTagSize(fieldNumber) + computeUInt64SizeNoTag(value) - } - def computeInt64Size(fieldNumber: Int, value: Long): Int = { + def computeInt64Size(fieldNumber: Int, value: Long): Int = computeTagSize(fieldNumber) + computeInt64SizeNoTag(value) - } - def computeInt32Size(fieldNumber: Int, value: Int): Int = { + def computeInt32Size(fieldNumber: Int, value: Int): Int = computeTagSize(fieldNumber) + computeInt32SizeNoTag(value) - } - def computeFixed64Size(fieldNumber: Int, value: Long): Int = { + def computeFixed64Size(fieldNumber: Int, value: Long): Int = computeTagSize(fieldNumber) + computeFixed64SizeNoTag(value) - } - def computeFixed32Size(fieldNumber: Int, value: Int): Int = { + def computeFixed32Size(fieldNumber: Int, value: Int): Int = computeTagSize(fieldNumber) + computeFixed32SizeNoTag(value) 
- } - def computeBoolSize(fieldNumber: Int, value: Boolean): Int = { + def computeBoolSize(fieldNumber: Int, value: Boolean): Int = computeTagSize(fieldNumber) + computeBoolSizeNoTag(value) - } - def computeStringSize(fieldNumber: Int, value: String): Int = { + def computeStringSize(fieldNumber: Int, value: String): Int = computeTagSize(fieldNumber) + computeStringSizeNoTag(value) - } // def computeBytesSize(fieldNumber: Int, value: ByteString): Int = { // computeTagSize(fieldNumber) + computeBytesSizeNoTag(value) // } - def computeByteArraySize(fieldNumber: Int, value: Array[Byte]): Int = { + def computeByteArraySize(fieldNumber: Int, value: Array[Byte]): Int = computeTagSize(fieldNumber) + computeByteArraySizeNoTag(value) - } - def computeByteBufferSize(fieldNumber: Int, value: ByteBuffer): Int = { + def computeByteBufferSize(fieldNumber: Int, value: ByteBuffer): Int = computeTagSize(fieldNumber) + computeByteBufferSizeNoTag(value) - } - def computeUInt32Size(fieldNumber: Int, value: Int): Int = { + def computeUInt32Size(fieldNumber: Int, value: Int): Int = computeTagSize(fieldNumber) + computeUInt32SizeNoTag(value) - } - def computeEnumSize(fieldNumber: Int, value: Int): Int = { + def computeEnumSize(fieldNumber: Int, value: Int): Int = computeTagSize(fieldNumber) + computeEnumSizeNoTag(value) - } - def computeSFixed32Size(fieldNumber: Int, value: Int): Int = { + def computeSFixed32Size(fieldNumber: Int, value: Int): Int = computeTagSize(fieldNumber) + computeSFixed32SizeNoTag(value) - } - def computeSFixed64Size(fieldNumber: Int, value: Long): Int = { + def computeSFixed64Size(fieldNumber: Int, value: Long): Int = computeTagSize(fieldNumber) + computeSFixed64SizeNoTag(value) - } - def computeSInt32Size(fieldNumber: Int, value: Int): Int = { + def computeSInt32Size(fieldNumber: Int, value: Int): Int = computeTagSize(fieldNumber) + computeSInt32SizeNoTag(value) - } - def computeSInt64Size(fieldNumber: Int, value: Long): Int = { + def 
computeSInt64Size(fieldNumber: Int, value: Long): Int = computeTagSize(fieldNumber) + computeSInt64SizeNoTag(value) - } def computeDoubleSizeNoTag(value: Double): Int = LITTLE_ENDIAN_64_SIZE @@ -133,13 +107,11 @@ object SemanticdbOutputStream { def computeInt64SizeNoTag(value: Long): Int = computeRawVarint64Size(value) - def computeInt32SizeNoTag(value: Int): Int = { - if (value >= 0) { + def computeInt32SizeNoTag(value: Int): Int = + if (value >= 0) computeRawVarint32Size(value) - } else { + else 10 - } - } def computeFixed64SizeNoTag(value: Long): Int = LITTLE_ENDIAN_64_SIZE @@ -147,22 +119,19 @@ object SemanticdbOutputStream { def computeBoolSizeNoTag(value: Boolean): Int = 1 - def computeStringSizeNoTag(value: String): Int = { + def computeStringSizeNoTag(value: String): Int = val bytes = value.getBytes(StandardCharsets.UTF_8) computeRawVarint32Size(bytes.length) + bytes.length - } // def computeBytesSizeNoTag(value: ByteString): Int = { // computeRawVarint32Size(value.size) + value.size // } - def computeByteArraySizeNoTag(value: Array[Byte]): Int = { + def computeByteArraySizeNoTag(value: Array[Byte]): Int = computeRawVarint32Size(value.length) + value.length - } - def computeByteBufferSizeNoTag(value: ByteBuffer): Int = { + def computeByteBufferSizeNoTag(value: ByteBuffer): Int = computeRawVarint32Size(value.capacity()) + value.capacity() - } def computeUInt32SizeNoTag(value: Int): Int = computeRawVarint32Size(value) @@ -172,13 +141,11 @@ object SemanticdbOutputStream { def computeSFixed64SizeNoTag(value: Long): Int = LITTLE_ENDIAN_64_SIZE - def computeSInt32SizeNoTag(value: Int): Int = { + def computeSInt32SizeNoTag(value: Int): Int = computeRawVarint32Size(encodeZigZag32(value)) - } - def computeSInt64SizeNoTag(value: Long): Int = { + def computeSInt64SizeNoTag(value: Long): Int = computeRawVarint64Size(encodeZigZag64(value)) - } @SerialVersionUID(-6947486886997889499L) class OutOfSpaceException() @@ -187,19 +154,17 @@ object SemanticdbOutputStream { "out 
of space." ) - def computeTagSize(fieldNumber: Int): Int = { + def computeTagSize(fieldNumber: Int): Int = computeRawVarint32Size(WireFormat.makeTag(fieldNumber, 0)) - } - def computeRawVarint32Size(value: Int): Int = { + def computeRawVarint32Size(value: Int): Int = if ((value & (0xffffffff << 7)) == 0) return 1 if ((value & (0xffffffff << 14)) == 0) return 2 if ((value & (0xffffffff << 21)) == 0) return 3 if ((value & (0xffffffff << 28)) == 0) return 4 5 - } - def computeRawVarint64Size(value: Long): Int = { + def computeRawVarint64Size(value: Long): Int = if ((value & (0xFFFFFFFFFFFFFFFFL << 7)) == 0) return 1 if ((value & (0xFFFFFFFFFFFFFFFFL << 14)) == 0) return 2 if ((value & (0xFFFFFFFFFFFFFFFFL << 21)) == 0) return 3 @@ -210,7 +175,6 @@ object SemanticdbOutputStream { if ((value & (0xFFFFFFFFFFFFFFFFL << 56)) == 0) return 8 if ((value & (0xFFFFFFFFFFFFFFFFL << 63)) == 0) return 9 10 - } val LITTLE_ENDIAN_32_SIZE = 4 @@ -219,225 +183,180 @@ object SemanticdbOutputStream { def encodeZigZag32(n: Int): Int = (n << 1) ^ (n >> 31) def encodeZigZag64(n: Long): Long = (n << 1) ^ (n >> 63) -} -class SemanticdbOutputStream(output: OutputStream, buffer: Array[Byte]) { - private def refreshBuffer(): Unit = { - if (output == null) { +class SemanticdbOutputStream(output: OutputStream, buffer: Array[Byte]): + private def refreshBuffer(): Unit = + if (output == null) throw new OutOfSpaceException() - } output.write(buffer, 0, position) position = 0 - } - def flush(): Unit = { - if (output != null) { + def flush(): Unit = + if (output != null) refreshBuffer() - } - } - def spaceLeft(): Int = { - if (output == null) { + def spaceLeft(): Int = + if (output == null) limit - position - } else { + else throw new UnsupportedOperationException( "spaceLeft() can only be called on SemanticdbOutputStreams that are " + "writing to a flat array." 
) - } - } - def checkNoSpaceLeft(): Unit = { - if (spaceLeft() != 0) { + def checkNoSpaceLeft(): Unit = + if (spaceLeft() != 0) throw new IllegalStateException("Did not write as much data as expected.") - } - } private var position = 0 private val limit = buffer.length private var totalBytesWritten: Int = 0 - def writeDouble(fieldNumber: Int, value: Double): Unit = { + def writeDouble(fieldNumber: Int, value: Double): Unit = writeTag(fieldNumber, WireFormat.WIRETYPE_FIXED64) writeDoubleNoTag(value) - } - def writeFloat(fieldNumber: Int, value: Float): Unit = { + def writeFloat(fieldNumber: Int, value: Float): Unit = writeTag(fieldNumber, WireFormat.WIRETYPE_FIXED32) writeFloatNoTag(value) - } - def writeUInt64(fieldNumber: Int, value: Long): Unit = { + def writeUInt64(fieldNumber: Int, value: Long): Unit = writeTag(fieldNumber, WireFormat.WIRETYPE_VARINT) writeUInt64NoTag(value) - } - def writeInt64(fieldNumber: Int, value: Long): Unit = { + def writeInt64(fieldNumber: Int, value: Long): Unit = writeTag(fieldNumber, WireFormat.WIRETYPE_VARINT) writeInt64NoTag(value) - } - def writeInt32(fieldNumber: Int, value: Int): Unit = { + def writeInt32(fieldNumber: Int, value: Int): Unit = writeTag(fieldNumber, WireFormat.WIRETYPE_VARINT) writeInt32NoTag(value) - } - def writeFixed64(fieldNumber: Int, value: Long): Unit = { + def writeFixed64(fieldNumber: Int, value: Long): Unit = writeTag(fieldNumber, WireFormat.WIRETYPE_FIXED64) writeFixed64NoTag(value) - } - def writeFixed32(fieldNumber: Int, value: Int): Unit = { + def writeFixed32(fieldNumber: Int, value: Int): Unit = writeTag(fieldNumber, WireFormat.WIRETYPE_FIXED32) writeFixed32NoTag(value) - } - def writeBool(fieldNumber: Int, value: Boolean): Unit = { + def writeBool(fieldNumber: Int, value: Boolean): Unit = writeTag(fieldNumber, WireFormat.WIRETYPE_VARINT) writeBoolNoTag(value) - } - def writeString(fieldNumber: Int, value: String): Unit = { + def writeString(fieldNumber: Int, value: String): Unit = 
writeTag(fieldNumber, WireFormat.WIRETYPE_LENGTH_DELIMITED) writeStringNoTag(value) - } // def writeBytes(fieldNumber: Int, value: ByteString): Unit = { // writeTag(fieldNumber, WireFormat.WIRETYPE_LENGTH_DELIMITED) // writeBytesNoTag(value) // } - def writeByteArray(fieldNumber: Int, value: Array[Byte]): Unit = { + def writeByteArray(fieldNumber: Int, value: Array[Byte]): Unit = writeTag(fieldNumber, WireFormat.WIRETYPE_LENGTH_DELIMITED) writeByteArrayNoTag(value) - } def writeByteArray( fieldNumber: Int, value: Array[Byte], offset: Int, length: Int - ): Unit = { + ): Unit = writeTag(fieldNumber, WireFormat.WIRETYPE_LENGTH_DELIMITED) writeByteArrayNoTag(value, offset, length) - } - def writeByteBuffer(fieldNumber: Int, value: ByteBuffer): Unit = { + def writeByteBuffer(fieldNumber: Int, value: ByteBuffer): Unit = writeTag(fieldNumber, WireFormat.WIRETYPE_LENGTH_DELIMITED) writeByteBufferNoTag(value) - } - def writeUInt32(fieldNumber: Int, value: Int): Unit = { + def writeUInt32(fieldNumber: Int, value: Int): Unit = writeTag(fieldNumber, WireFormat.WIRETYPE_VARINT) writeUInt32NoTag(value) - } - def writeEnum(fieldNumber: Int, value: Int): Unit = { + def writeEnum(fieldNumber: Int, value: Int): Unit = writeTag(fieldNumber, WireFormat.WIRETYPE_VARINT) writeEnumNoTag(value) - } - def writeSFixed32(fieldNumber: Int, value: Int): Unit = { + def writeSFixed32(fieldNumber: Int, value: Int): Unit = writeTag(fieldNumber, WireFormat.WIRETYPE_FIXED32) writeSFixed32NoTag(value) - } - def writeSFixed64(fieldNumber: Int, value: Long): Unit = { + def writeSFixed64(fieldNumber: Int, value: Long): Unit = writeTag(fieldNumber, WireFormat.WIRETYPE_FIXED64) writeSFixed64NoTag(value) - } - def writeSInt32(fieldNumber: Int, value: Int): Unit = { + def writeSInt32(fieldNumber: Int, value: Int): Unit = writeTag(fieldNumber, WireFormat.WIRETYPE_VARINT) writeSInt32NoTag(value) - } - def writeSInt64(fieldNumber: Int, value: Long): Unit = { + def writeSInt64(fieldNumber: Int, value: Long): 
Unit = writeTag(fieldNumber, WireFormat.WIRETYPE_VARINT) writeSInt64NoTag(value) - } - def writeDoubleNoTag(value: Double): Unit = { + def writeDoubleNoTag(value: Double): Unit = writeRawLittleEndian64(java.lang.Double.doubleToLongBits(value)) - } - def writeFloatNoTag(value: Float): Unit = { + def writeFloatNoTag(value: Float): Unit = writeRawLittleEndian32(java.lang.Float.floatToIntBits(value)) - } - def writeUInt64NoTag(value: Long): Unit = { + def writeUInt64NoTag(value: Long): Unit = writeRawVarint64(value) - } - def writeInt64NoTag(value: Long): Unit = { + def writeInt64NoTag(value: Long): Unit = writeRawVarint64(value) - } - def writeInt32NoTag(value: Int): Unit = { - if (value >= 0) { + def writeInt32NoTag(value: Int): Unit = + if (value >= 0) writeRawVarint32(value) - } else { + else writeRawVarint64(value) - } - } - def writeFixed64NoTag(value: Long): Unit = { + def writeFixed64NoTag(value: Long): Unit = writeRawLittleEndian64(value) - } - def writeFixed32NoTag(value: Int): Unit = { + def writeFixed32NoTag(value: Int): Unit = writeRawLittleEndian32(value) - } - def writeBoolNoTag(value: Boolean): Unit = { + def writeBoolNoTag(value: Boolean): Unit = writeRawByte(if (value) 1 else 0) - } - def writeStringNoTag(value: String): Unit = { + def writeStringNoTag(value: String): Unit = val bytes = value.getBytes(StandardCharsets.UTF_8) writeRawVarint32(bytes.length) writeRawBytes(bytes) - } - def writeTag(fieldNumber: Int, wireType: Int): Unit = { + def writeTag(fieldNumber: Int, wireType: Int): Unit = writeRawVarint32(WireFormat.makeTag(fieldNumber, wireType)) - } - def writeRawVarint32(value0: Int): Unit = { + def writeRawVarint32(value0: Int): Unit = var value = value0 - while (true) { - if ((value & ~0x7F) == 0) { + while (true) + if ((value & ~0x7F) == 0) writeRawByte(value) return - } else { + else writeRawByte((value & 0x7F) | 0x80) value >>>= 7 - } - } - } - def writeRawVarint64(value0: Long): Unit = { + def writeRawVarint64(value0: Long): Unit = var 
value = value0 - while (true) { - if ((value & ~0x7FL) == 0) { + while (true) + if ((value & ~0x7FL) == 0) writeRawByte(value.toInt) return - } else { + else writeRawByte((value.toInt & 0x7F) | 0x80) value >>>= 7 - } - } - } - def writeRawLittleEndian32(value: Int): Unit = { + def writeRawLittleEndian32(value: Int): Unit = writeRawByte((value) & 0xFF) writeRawByte((value >> 8) & 0xFF) writeRawByte((value >> 16) & 0xFF) writeRawByte((value >> 24) & 0xFF) - } - def writeRawLittleEndian64(value: Long): Unit = { + def writeRawLittleEndian64(value: Long): Unit = writeRawByte((value).toInt & 0xFF) writeRawByte((value >> 8).toInt & 0xFF) writeRawByte((value >> 16).toInt & 0xFF) @@ -446,67 +365,54 @@ class SemanticdbOutputStream(output: OutputStream, buffer: Array[Byte]) { writeRawByte((value >> 40).toInt & 0xFF) writeRawByte((value >> 48).toInt & 0xFF) writeRawByte((value >> 56).toInt & 0xFF) - } // def writeBytesNoTag(value: ByteString): Unit = { // writeRawVarint32(value.size) // writeRawBytes(value) // } - def writeByteArrayNoTag(value: Array[Byte]): Unit = { + def writeByteArrayNoTag(value: Array[Byte]): Unit = writeRawVarint32(value.length) writeRawBytes(value) - } def writeByteArrayNoTag( value: Array[Byte], offset: Int, length: Int - ): Unit = { + ): Unit = writeRawVarint32(length) writeRawBytes(value, offset, length) - } - def writeByteBufferNoTag(value: ByteBuffer): Unit = { + def writeByteBufferNoTag(value: ByteBuffer): Unit = writeRawVarint32(value.capacity()) writeRawBytes(value) - } - def writeUInt32NoTag(value: Int): Unit = { + def writeUInt32NoTag(value: Int): Unit = writeRawVarint32(value) - } - def writeEnumNoTag(value: Int): Unit = { + def writeEnumNoTag(value: Int): Unit = writeInt32NoTag(value) - } - def writeSFixed32NoTag(value: Int): Unit = { + def writeSFixed32NoTag(value: Int): Unit = writeRawLittleEndian32(value) - } - def writeSFixed64NoTag(value: Long): Unit = { + def writeSFixed64NoTag(value: Long): Unit = writeRawLittleEndian64(value) - } - 
def writeSInt32NoTag(value: Int): Unit = { + def writeSInt32NoTag(value: Int): Unit = writeRawVarint32(encodeZigZag32(value)) - } - def writeSInt64NoTag(value: Long): Unit = { + def writeSInt64NoTag(value: Long): Unit = writeRawVarint64(encodeZigZag64(value)) - } - def writeRawByte(value: Byte): Unit = { - if (position == limit) { + def writeRawByte(value: Byte): Unit = + if (position == limit) refreshBuffer() - } buffer(position) = value position += 1 - } - def writeRawByte(value: Int): Unit = { + def writeRawByte(value: Int): Unit = writeRawByte(value.toByte) - } // def writeRawBytes(value: ByteString): Unit = { // var offset = 0 @@ -539,53 +445,47 @@ class SemanticdbOutputStream(output: OutputStream, buffer: Array[Byte]) { // } // } - def writeRawBytes(value: Array[Byte]): Unit = { + def writeRawBytes(value: Array[Byte]): Unit = writeRawBytes(value, 0, value.length) - } - def writeRawBytes(value: ByteBuffer): Unit = { - if (value.hasArray()) { + def writeRawBytes(value: ByteBuffer): Unit = + if (value.hasArray()) writeRawBytes(value.array(), value.arrayOffset(), value.capacity()) - } else { + else val duplicated = value.duplicate() duplicated.clear() writeRawBytesInternal(duplicated) - } - } - private def writeRawBytesInternal(value: ByteBuffer): Unit = { + private def writeRawBytesInternal(value: ByteBuffer): Unit = var length = value.remaining() - if (limit - position >= length) { + if (limit - position >= length) value.get(buffer, position, length) position += length totalBytesWritten += length - } else { + else val bytesWritten = limit - position value.get(buffer, position, bytesWritten) length -= bytesWritten position = limit totalBytesWritten += bytesWritten refreshBuffer() - while (length > limit) { + while (length > limit) value.get(buffer, 0, limit) output.write(buffer, 0, limit) length -= limit totalBytesWritten += limit - } value.get(buffer, 0, length) position = length totalBytesWritten += length - } - } - def writeRawBytes(value: Array[Byte], 
offset0: Int, length0: Int): Unit = { + def writeRawBytes(value: Array[Byte], offset0: Int, length0: Int): Unit = var offset = offset0 var length = length0 - if (limit - position >= length) { + if (limit - position >= length) System.arraycopy(value, offset, buffer, position, length) position += length totalBytesWritten += length - } else { + else val bytesWritten = limit - position System.arraycopy(value, offset, buffer, position, bytesWritten) offset += bytesWritten @@ -593,18 +493,13 @@ class SemanticdbOutputStream(output: OutputStream, buffer: Array[Byte]) { position = limit totalBytesWritten += bytesWritten refreshBuffer() - if (length <= limit) { + if (length <= limit) System.arraycopy(value, offset, buffer, 0, length) position = length - } else { + else output.write(value, offset, length) - } totalBytesWritten += length - } - } -} -object Internal { +object Internal: val EMPTY_BYTE_ARRAY: Array[Byte] = Array() -} diff --git a/compiler/src/dotty/tools/dotc/semanticdb/internal/SemanticdbTypeMapper.scala b/compiler/src/dotty/tools/dotc/semanticdb/internal/SemanticdbTypeMapper.scala index 2310bcdbc97c..48819f7b5f0d 100644 --- a/compiler/src/dotty/tools/dotc/semanticdb/internal/SemanticdbTypeMapper.scala +++ b/compiler/src/dotty/tools/dotc/semanticdb/internal/SemanticdbTypeMapper.scala @@ -1,17 +1,14 @@ package dotty.tools.dotc.semanticdb.internal -abstract class SemanticdbTypeMapper[BaseType, CustomType] { +abstract class SemanticdbTypeMapper[BaseType, CustomType]: def toCustom(base: BaseType): CustomType def toBase(custom: CustomType): BaseType -} -object SemanticdbTypeMapper { +object SemanticdbTypeMapper: def apply[BaseType, CustomType](baseToCustom: BaseType => CustomType)( customToBase: CustomType => BaseType ): SemanticdbTypeMapper[BaseType, CustomType] = - new SemanticdbTypeMapper[BaseType, CustomType] { + new SemanticdbTypeMapper[BaseType, CustomType]: def toCustom(base: BaseType): CustomType = baseToCustom(base) def toBase(custom: CustomType): BaseType = 
customToBase(custom) - } -} diff --git a/compiler/src/dotty/tools/dotc/semanticdb/internal/WireFormat.scala b/compiler/src/dotty/tools/dotc/semanticdb/internal/WireFormat.scala index 1a193fe99ca6..e68b156243ef 100644 --- a/compiler/src/dotty/tools/dotc/semanticdb/internal/WireFormat.scala +++ b/compiler/src/dotty/tools/dotc/semanticdb/internal/WireFormat.scala @@ -1,6 +1,6 @@ package dotty.tools.dotc.semanticdb.internal -object WireFormat { +object WireFormat: val WIRETYPE_VARINT = 0 val WIRETYPE_FIXED64 = 1 val WIRETYPE_LENGTH_DELIMITED = 2 @@ -17,4 +17,3 @@ object WireFormat { (fieldNumber << TAG_TYPE_BITS) | wireType def getTagFieldNumber(tag: Int): Int = tag >>> TAG_TYPE_BITS -} diff --git a/compiler/src/dotty/tools/dotc/staging/CrossStageSafety.scala b/compiler/src/dotty/tools/dotc/staging/CrossStageSafety.scala index 8360d8e08211..6bbf7b970990 100644 --- a/compiler/src/dotty/tools/dotc/staging/CrossStageSafety.scala +++ b/compiler/src/dotty/tools/dotc/staging/CrossStageSafety.scala @@ -50,7 +50,7 @@ import dotty.tools.dotc.util.SrcPos * } * */ -class CrossStageSafety extends TreeMapWithStages { +class CrossStageSafety extends TreeMapWithStages: import tpd._ private val InAnnotation = Property.Key[Unit]() @@ -204,7 +204,7 @@ class CrossStageSafety extends TreeMapWithStages { traverseChildren(tp) }.traverse(tree.tpe) - private def levelError(sym: Symbol, tp: Type, pos: SrcPos)(using Context): tp.type = { + private def levelError(sym: Symbol, tp: Type, pos: SrcPos)(using Context): tp.type = def symStr = if (!tp.isInstanceOf[ThisType]) sym.show else if (sym.is(ModuleClass)) sym.sourceModule.show @@ -220,7 +220,6 @@ class CrossStageSafety extends TreeMapWithStages { | - the definition is at level ${levelOf(sym)}, | - but the access is at level $level.$hint""", pos) tp - } private object CancelledQuote: def unapply(tree: Quote): Option[Tree] = @@ -237,4 +236,3 @@ class CrossStageSafety extends TreeMapWithStages { case Quote(inner, _) => Some(inner) case _ => None 
rec(tree.expr) -} diff --git a/compiler/src/dotty/tools/dotc/staging/HealType.scala b/compiler/src/dotty/tools/dotc/staging/HealType.scala index 023271960b40..8eb6b40ea55a 100644 --- a/compiler/src/dotty/tools/dotc/staging/HealType.scala +++ b/compiler/src/dotty/tools/dotc/staging/HealType.scala @@ -13,7 +13,7 @@ import dotty.tools.dotc.transform.SymUtils._ import dotty.tools.dotc.typer.Implicits.SearchFailureType import dotty.tools.dotc.util.SrcPos -class HealType(pos: SrcPos)(using Context) extends TypeMap { +class HealType(pos: SrcPos)(using Context) extends TypeMap: /** If the type refers to a locally defined symbol (either directly, or in a pickled type), * check that its staging level matches the current level. @@ -79,7 +79,7 @@ class HealType(pos: SrcPos)(using Context) extends TypeMap { * reference to a type alias containing the equivalent of `${summon[quoted.Type[T]]}.Underlying`. * Emits an error if `T` cannot be healed and returns `T`. */ - protected def tryHeal(tp: TypeRef): Type = { + protected def tryHeal(tp: TypeRef): Type = val reqType = defn.QuotedTypeClass.typeRef.appliedTo(tp) val tag = ctx.typer.inferImplicitArg(reqType, pos.span) tag.tpe match @@ -99,13 +99,10 @@ class HealType(pos: SrcPos)(using Context) extends TypeMap { | |""", pos) tp - } - private def levelError(sym: Symbol, tp: Type, pos: SrcPos): tp.type = { + private def levelError(sym: Symbol, tp: Type, pos: SrcPos): tp.type = report.error( em"""access to $sym from wrong staging level: | - the definition is at level ${levelOf(sym)}, | - but the access is at level $level""", pos) tp - } -} diff --git a/compiler/src/dotty/tools/dotc/staging/StagingLevel.scala b/compiler/src/dotty/tools/dotc/staging/StagingLevel.scala index 05b3efab408c..ed789007116a 100644 --- a/compiler/src/dotty/tools/dotc/staging/StagingLevel.scala +++ b/compiler/src/dotty/tools/dotc/staging/StagingLevel.scala @@ -11,7 +11,7 @@ import dotty.tools.dotc.util.SrcPos import scala.collection.mutable -object StagingLevel { 
+object StagingLevel: /** A key to be used in a context property that tracks the staging level */ private val LevelKey = new Property.Key[Int] @@ -49,4 +49,3 @@ object StagingLevel { val syms1 = syms//.filter(sym => !levelOfMap.contains(sym)) val newMap = syms1.foldLeft(levelOfMap)((acc, sym) => acc.updated(sym, level)) ctx.fresh.setProperty(LevelOfKey, newMap) -} diff --git a/compiler/src/dotty/tools/dotc/staging/TreeMapWithStages.scala b/compiler/src/dotty/tools/dotc/staging/TreeMapWithStages.scala index 674dfff2f642..e85e05f05212 100644 --- a/compiler/src/dotty/tools/dotc/staging/TreeMapWithStages.scala +++ b/compiler/src/dotty/tools/dotc/staging/TreeMapWithStages.scala @@ -11,14 +11,14 @@ import dotty.tools.dotc.staging.StagingLevel.* import scala.collection.mutable /** TreeMap that keeps track of staging levels using StagingLevel. */ -abstract class TreeMapWithStages extends TreeMapWithImplicits { +abstract class TreeMapWithStages extends TreeMapWithImplicits: import tpd._ override def transform(tree: Tree)(using Context): Tree = if (tree.source != ctx.source && tree.source.exists) transform(tree)(using ctx.withSource(tree.source)) - else reporting.trace(i"TreeMapWithStages.transform $tree at $level", staging, show = true) { - tree match { + else reporting.trace(i"TreeMapWithStages.transform $tree at $level", staging, show = true): + tree match case Block(stats, _) => val defSyms = stats.collect { case defTree: DefTree => defTree.symbol } super.transform(tree)(using symbolsInCurrentLevel(defSyms)) @@ -44,6 +44,3 @@ abstract class TreeMapWithStages extends TreeMapWithImplicits { case _ => super.transform(tree) - } - } -} diff --git a/compiler/src/dotty/tools/dotc/transform/AccessProxies.scala b/compiler/src/dotty/tools/dotc/transform/AccessProxies.scala index 3175ffceae49..f271c8f8d1f8 100644 --- a/compiler/src/dotty/tools/dotc/transform/AccessProxies.scala +++ b/compiler/src/dotty/tools/dotc/transform/AccessProxies.scala @@ -17,7 +17,7 @@ import 
config.Printers.transforms /** A utility class for generating access proxies. Currently used for * inline accessors and protected accessors. */ -abstract class AccessProxies { +abstract class AccessProxies: import ast.tpd._ import AccessProxies._ @@ -33,17 +33,16 @@ abstract class AccessProxies { private def accessorDefs(cls: Symbol)(using Context): Iterator[DefDef] = for accessor <- cls.info.decls.iterator; accessed <- accessedBy.get(accessor) yield DefDef(accessor.asTerm, prefss => { - def numTypeParams = accessed.info match { + def numTypeParams = accessed.info match case info: PolyType => info.paramNames.length case _ => 0 - } val (targs, argss) = splitArgs(prefss) val (accessRef, forwardedTpts, forwardedArgss) = if (passReceiverAsArg(accessor.name)) (argss.head.head.select(accessed), targs.takeRight(numTypeParams), argss.tail) else (if (accessed.isStatic) ref(accessed) else ref(TermRef(cls.thisType, accessed)), - targs, argss) + targs, argss) val rhs = if (accessor.name.isSetterName && forwardedArgss.nonEmpty && forwardedArgss.head.nonEmpty) // defensive conditions @@ -57,64 +56,57 @@ abstract class AccessProxies { }) /** Add all needed accessors to the `body` of class `cls` */ - def addAccessorDefs(cls: Symbol, body: List[Tree])(using Context): List[Tree] = { + def addAccessorDefs(cls: Symbol, body: List[Tree])(using Context): List[Tree] = val accDefs = accessorDefs(cls).toList transforms.println(i"add accessors for $cls: $accDefs%, %") if (accDefs.isEmpty) body else body ++ accDefs - } - trait Insert { + trait Insert: import ast.tpd._ /** The name of the accessor for definition with given `name` in given `site` */ def accessorNameOf(name: TermName, site: Symbol)(using Context): TermName def needsAccessor(sym: Symbol)(using Context): Boolean - def ifNoHost(reference: RefTree)(using Context): Tree = { + def ifNoHost(reference: RefTree)(using Context): Tree = assert(false, i"no host found for $reference with ${reference.symbol.showLocated} from ${ctx.owner}") 
reference - } /** A fresh accessor symbol */ - private def newAccessorSymbol(owner: Symbol, name: TermName, info: Type, accessed: Symbol)(using Context): TermSymbol = { + private def newAccessorSymbol(owner: Symbol, name: TermName, info: Type, accessed: Symbol)(using Context): TermSymbol = val sym = newSymbol(owner, name, Synthetic | Method, info, coord = accessed.span).entered if accessed.is(Private) then sym.setFlag(Final) else if sym.allOverriddenSymbols.exists(!_.is(Deferred)) then sym.setFlag(Override) if accessed.hasAnnotation(defn.ExperimentalAnnot) then sym.addAnnotation(defn.ExperimentalAnnot) sym - } /** An accessor symbol, create a fresh one unless one exists already */ - protected def accessorSymbol(owner: Symbol, accessorName: TermName, accessorInfo: Type, accessed: Symbol)(using Context): Symbol = { + protected def accessorSymbol(owner: Symbol, accessorName: TermName, accessorInfo: Type, accessed: Symbol)(using Context): Symbol = def refersToAccessed(sym: Symbol) = accessedBy.get(sym).contains(accessed) - owner.info.decl(accessorName).suchThat(refersToAccessed).symbol.orElse { + owner.info.decl(accessorName).suchThat(refersToAccessed).symbol.orElse: val acc = newAccessorSymbol(owner, accessorName, accessorInfo, accessed) accessedBy(acc) = accessed acc - } - } /** Rewire reference to refer to `accessor` symbol */ private def rewire(reference: RefTree, accessor: Symbol)(using Context): Tree = { - reference match { + reference match case Select(qual, _) if qual.tpe.derivesFrom(accessor.owner) => qual.select(accessor) case _ => ref(accessor) - } }.withSpan(reference.span) /** Given a reference to a getter accessor, the corresponding setter reference */ - def useSetter(getterRef: Tree)(using Context): Tree = getterRef match { + def useSetter(getterRef: Tree)(using Context): Tree = getterRef match case getterRef: RefTree => val getter = getterRef.symbol.asTerm val accessed = accessedBy(getter) val setterName = getter.name.setterName - def 
toSetterInfo(getterInfo: Type): Type = getterInfo match { + def toSetterInfo(getterInfo: Type): Type = getterInfo match case getterInfo: LambdaType => getterInfo.derivedLambdaType(resType = toSetterInfo(getterInfo.resType)) case _ => MethodType(getterInfo :: Nil, defn.UnitType) - } val setterInfo = toSetterInfo(getter.info.widenExpr) val setter = accessorSymbol(getter.owner, setterName, setterInfo, accessed) rewire(getterRef, setter) @@ -122,7 +114,6 @@ abstract class AccessProxies { cpy.Apply(getterRef)(useSetter(fn), args) case TypeApply(fn, args) => cpy.TypeApply(getterRef)(useSetter(fn), args) - } /** Create an accessor unless one exists already, and replace the original * access with a reference to the accessor. @@ -130,10 +121,10 @@ abstract class AccessProxies { * @param reference The original reference to the non-public symbol * @param onLHS The reference is on the left-hand side of an assignment */ - def useAccessor(reference: RefTree)(using Context): Tree = { + def useAccessor(reference: RefTree)(using Context): Tree = val accessed = reference.symbol.asTerm var accessorClass = hostForAccessorOf(accessed: Symbol) - if (accessorClass.exists) { + if (accessorClass.exists) if accessorClass.is(Package) then accessorClass = ctx.owner.topLevelClass val accessorName = accessorNameOf(accessed.name, accessorClass) @@ -141,28 +132,22 @@ abstract class AccessProxies { accessed.info.ensureMethodic.asSeenFrom(accessorClass.thisType, accessed.owner) val accessor = accessorSymbol(accessorClass, accessorName, accessorInfo, accessed) rewire(reference, accessor) - } else ifNoHost(reference) - } /** Replace tree with a reference to an accessor if needed */ - def accessorIfNeeded(tree: Tree)(using Context): Tree = tree match { + def accessorIfNeeded(tree: Tree)(using Context): Tree = tree match case tree: RefTree if needsAccessor(tree.symbol) => - if (tree.symbol.isConstructor) { + if (tree.symbol.isConstructor) report.error("Implementation restriction: cannot use private 
constructors in inlineable methods", tree.srcPos) tree // TODO: create a proper accessor for the private constructor - } else useAccessor(tree) case _ => tree - } - } -} -object AccessProxies { +object AccessProxies: /** Where an accessor for the `accessed` symbol should be placed. * This is the closest enclosing class that has `accessed` as a member. */ - def hostForAccessorOf(accessed: Symbol)(using Context): Symbol = { + def hostForAccessorOf(accessed: Symbol)(using Context): Symbol = def recur(cls: Symbol): Symbol = if (!cls.exists) NoSymbol else if cls.derivesFrom(accessed.owner) @@ -170,5 +155,3 @@ object AccessProxies { then cls else recur(cls.owner) recur(ctx.owner) - } -} diff --git a/compiler/src/dotty/tools/dotc/transform/ArrayApply.scala b/compiler/src/dotty/tools/dotc/transform/ArrayApply.scala index 872c7cc897de..67a676106165 100644 --- a/compiler/src/dotty/tools/dotc/transform/ArrayApply.scala +++ b/compiler/src/dotty/tools/dotc/transform/ArrayApply.scala @@ -15,7 +15,7 @@ import dotty.tools.dotc.ast.tpd * * Transforms `scala.Array.apply([....])` and `scala.Array.apply(..., [....])` into `[...]` */ -class ArrayApply extends MiniPhase { +class ArrayApply extends MiniPhase: import tpd._ override def phaseName: String = ArrayApply.name @@ -24,7 +24,7 @@ class ArrayApply extends MiniPhase { override def transformApply(tree: tpd.Apply)(using Context): tpd.Tree = if isArrayModuleApply(tree.symbol) then - tree.args match { + tree.args match case StripAscription(Apply(wrapRefArrayMeth, (seqLit: tpd.JavaSeqLiteral) :: Nil)) :: ct :: Nil if defn.WrapArrayMethods().contains(wrapRefArrayMeth.symbol) && elideClassTag(ct) => seqLit @@ -35,7 +35,6 @@ class ArrayApply extends MiniPhase { case _ => tree - } else tree @@ -48,28 +47,23 @@ class ArrayApply extends MiniPhase { * - `ClassTag.apply(java.lang.XYZ.Type)` for boxed primitives `XYZ`` * - `ClassTag.XYZ` for primitive types */ - private def elideClassTag(ct: Tree)(using Context): Boolean = ct match { + private 
def elideClassTag(ct: Tree)(using Context): Boolean = ct match case Apply(_, rc :: Nil) if ct.symbol == defn.ClassTagModule_apply => - rc match { + rc match case _: Literal => true // ClassTag.apply(classOf[XYZ]) case rc: RefTree if rc.name == nme.TYPE_ => // ClassTag.apply(java.lang.XYZ.Type) defn.ScalaBoxedClasses().contains(rc.symbol.maybeOwner.companionClass) case _ => false - } case Apply(ctm: RefTree, _) if ctm.symbol.maybeOwner.companionModule == defn.ClassTagModule => // ClassTag.XYZ nme.ScalaValueNames.contains(ctm.name) case _ => false - } - object StripAscription { - def unapply(tree: Tree)(using Context): Some[Tree] = tree match { + object StripAscription: + def unapply(tree: Tree)(using Context): Some[Tree] = tree match case Typed(expr, _) => unapply(expr) case _ => Some(tree) - } - } -} object ArrayApply: val name: String = "arrayApply" diff --git a/compiler/src/dotty/tools/dotc/transform/ArrayConstructors.scala b/compiler/src/dotty/tools/dotc/transform/ArrayConstructors.scala index e783961649dd..97996d92a065 100644 --- a/compiler/src/dotty/tools/dotc/transform/ArrayConstructors.scala +++ b/compiler/src/dotty/tools/dotc/transform/ArrayConstructors.scala @@ -18,25 +18,24 @@ import scala.collection.immutable.:: * It assummes that generic arrays have already been handled by typer(see Applications.convertNewGenericArray). 
* Additionally it optimizes calls to scala.Array.ofDim functions by replacing them with calls to newArray with specific dimensions */ -class ArrayConstructors extends MiniPhase { +class ArrayConstructors extends MiniPhase: import ast.tpd._ override def phaseName: String = ArrayConstructors.name override def description: String = ArrayConstructors.description - override def transformApply(tree: tpd.Apply)(using Context): tpd.Tree = { + override def transformApply(tree: tpd.Apply)(using Context): tpd.Tree = def expand(elemType: Type, dims: List[Tree]) = tpd.newArray(elemType, tree.tpe, tree.span, JavaSeqLiteral(dims, TypeTree(defn.IntClass.typeRef))) - if (tree.fun.symbol eq defn.ArrayConstructor) { + if (tree.fun.symbol eq defn.ArrayConstructor) val TypeApply(tycon, targ :: Nil) = tree.fun: @unchecked expand(targ.tpe, tree.args) - } - else if ((tree.fun.symbol.maybeOwner eq defn.ArrayModuleClass) && (tree.fun.symbol.name eq nme.ofDim) && !tree.tpe.isInstanceOf[MethodicType]) { + else if ((tree.fun.symbol.maybeOwner eq defn.ArrayModuleClass) && (tree.fun.symbol.name eq nme.ofDim) && !tree.tpe.isInstanceOf[MethodicType]) val Apply(Apply(TypeApply(_, List(tp)), _), _) = tree: @unchecked val cs = tp.tpe.classSymbol - tree.fun match { + tree.fun match case Apply(TypeApply(t: Ident, targ), dims) if !TypeErasure.isGeneric(targ.head.tpe) && !ValueClasses.isDerivedValueClass(cs) => expand(targ.head.tpe, dims) @@ -44,12 +43,8 @@ class ArrayConstructors extends MiniPhase { if !TypeErasure.isGeneric(targ.head.tpe) && !ValueClasses.isDerivedValueClass(cs) => Block(t.qualifier :: Nil, expand(targ.head.tpe, dims)) case _ => tree - } - } else tree - } -} object ArrayConstructors: val name: String = "arrayConstructors" diff --git a/compiler/src/dotty/tools/dotc/transform/BeanProperties.scala b/compiler/src/dotty/tools/dotc/transform/BeanProperties.scala index 0c1f40d4f2bd..7755fa1b3086 100644 --- a/compiler/src/dotty/tools/dotc/transform/BeanProperties.scala +++ 
b/compiler/src/dotty/tools/dotc/transform/BeanProperties.scala @@ -33,12 +33,12 @@ class BeanProperties(thisPhase: DenotTransformer): info = MethodType(Nil, valDef.denot.info), coord = annot.tree.span ).enteredAfter(thisPhase).asTerm - .withAnnotationsCarrying(valDef.symbol, defn.BeanGetterMetaAnnot) + .withAnnotationsCarrying(valDef.symbol, defn.BeanGetterMetaAnnot) val body: Tree = ref(valDef.symbol) DefDef(meth, body).withSpan(meth.span) def maybeGenerateSetter(valDef: ValDef, annot: Annotation)(using Context): Option[Tree] = - Option.when(valDef.denot.asSymDenotation.flags.is(Mutable)) { + Option.when(valDef.denot.asSymDenotation.flags.is(Mutable)): val owner = ctx.owner val meth = newSymbol( owner, @@ -47,10 +47,9 @@ class BeanProperties(thisPhase: DenotTransformer): info = MethodType(valDef.name :: Nil, valDef.denot.info :: Nil, defn.UnitType), coord = annot.tree.span ).enteredAfter(thisPhase).asTerm - .withAnnotationsCarrying(valDef.symbol, defn.BeanSetterMetaAnnot) + .withAnnotationsCarrying(valDef.symbol, defn.BeanSetterMetaAnnot) def body(params: List[List[Tree]]): Tree = Assign(ref(valDef.symbol), params.head.head) DefDef(meth, body).withSpan(meth.span) - } def prefixedName(prefix: String, valName: Name) = (prefix + valName.lastPart.toString.capitalize).toTermName diff --git a/compiler/src/dotty/tools/dotc/transform/Bridges.scala b/compiler/src/dotty/tools/dotc/transform/Bridges.scala index 569b16681cde..00ef7be22215 100644 --- a/compiler/src/dotty/tools/dotc/transform/Bridges.scala +++ b/compiler/src/dotty/tools/dotc/transform/Bridges.scala @@ -15,14 +15,14 @@ import TypeErasure.transformInfo import Erasure.Boxing.adaptClosure /** A helper class for generating bridge methods in class `root`. 
*/ -class Bridges(root: ClassSymbol, thisPhase: DenotTransformer)(using Context) { +class Bridges(root: ClassSymbol, thisPhase: DenotTransformer)(using Context): import ast.tpd._ assert(ctx.phase == erasurePhase.next) private val preErasureCtx = ctx.withPhase(erasurePhase) private lazy val elimErasedCtx = ctx.withPhase(elimErasedValueTypePhase.next) - private class BridgesCursor(using Context) extends OverridingPairs.Cursor(root) { + private class BridgesCursor(using Context) extends OverridingPairs.Cursor(root): override def isSubParent(parent: Symbol, bc: Symbol)(using Context) = true @@ -41,7 +41,6 @@ class Bridges(root: ClassSymbol, thisPhase: DenotTransformer)(using Context) { override def canBeHandledByParent(sym1: Symbol, sym2: Symbol, parent: Symbol): Boolean = OverridingPairs.isOverridingPair(sym1, sym2, parent.thisType) - } val site = root.thisType @@ -65,25 +64,22 @@ class Bridges(root: ClassSymbol, thisPhase: DenotTransformer)(using Context) { * issue an error: A bridge would be needed yet it would clash with the member itself. 
* See neg/i1905.scala */ - private def addBridgeIfNeeded(member: Symbol, other: Symbol) = { + private def addBridgeIfNeeded(member: Symbol, other: Symbol) = def bridgeExists = bridgesScope.lookupAll(member.name).exists(bridge => bridgeTarget(bridge) == member && bridge.signature == other.signature) def info(sym: Symbol)(using Context) = sym.info - def desc(sym: Symbol)= { - val infoStr = info(sym)(using preErasureCtx) match { + def desc(sym: Symbol)= + val infoStr = info(sym)(using preErasureCtx) match case ExprType(info) => i": $info" case info => info.show - } i"$sym$infoStr in ${sym.owner}" - } - if (member.signature == other.signature) { + if (member.signature == other.signature) if (!member.info.matches(other.info)) report.error(em"""bridge generated for member ${desc(member)} |which overrides ${desc(other)} |clashes with definition of the member itself; both have erased type ${info(member)(using elimErasedCtx)}."""", bridgePosFor(member)) - } else if !inContext(preErasureCtx)(site.memberInfo(member).matches(site.memberInfo(other))) then // Neither symbol signatures nor pre-erasure types seen from root match; this means // according to Scala 2 semantics there is no override. 
@@ -92,11 +88,10 @@ class Bridges(root: ClassSymbol, thisPhase: DenotTransformer)(using Context) { report.log(i"suppress bridge in $root for ${member} in ${member.owner} and ${other.showLocated} since member infos ${site.memberInfo(member)} and ${site.memberInfo(other)} do not match") else if !bridgeExists then addBridge(member, other) - } /** Generate bridge between `member` and `other` */ - private def addBridge(member: Symbol, other: Symbol) = { + private def addBridge(member: Symbol, other: Symbol) = val bridge = other.copy( owner = root, flags = (member.flags | Method | Bridge | Artifact) &~ @@ -111,10 +106,9 @@ class Bridges(root: ClassSymbol, thisPhase: DenotTransformer)(using Context) { bridgeTarget(bridge) = member bridgesScope.enter(bridge) - if (other.owner == root) { + if (other.owner == root) root.delete(other) toBeRemoved += other - } val memberCount = contextResultCount(member) @@ -167,7 +161,6 @@ class Bridges(root: ClassSymbol, thisPhase: DenotTransformer)(using Context) { else etaExpand(ref, argss.head) bridges += DefDef(bridge, bridgeRhs(_).withSpan(bridge.span)) - } /** Add all necessary bridges to template statements `stats`, and remove at the same * time deferred methods in `stats` that are replaced by a bridge with the same signature. 
@@ -180,4 +173,3 @@ class Bridges(root: ClassSymbol, thisPhase: DenotTransformer)(using Context) { opc.next() if bridges.isEmpty then stats else stats.filterNot(stat => toBeRemoved contains stat.symbol) ::: bridges.toList -} diff --git a/compiler/src/dotty/tools/dotc/transform/CapturedVars.scala b/compiler/src/dotty/tools/dotc/transform/CapturedVars.scala index 32bcc53184b1..c0218830230d 100644 --- a/compiler/src/dotty/tools/dotc/transform/CapturedVars.scala +++ b/compiler/src/dotty/tools/dotc/transform/CapturedVars.scala @@ -34,7 +34,7 @@ class CapturedVars extends MiniPhase with IdentityDenotTransformer: override def initContext(ctx: FreshContext): Unit = Captured = ctx.addLocation(util.ReadOnlySet.empty) - private class RefInfo(using Context) { + private class RefInfo(using Context): /** The classes for which a Ref type exists. */ val refClassKeys: collection.Set[Symbol] = defn.ScalaNumericValueClasses() `union` Set(defn.BooleanClass, defn.ObjectClass) @@ -50,82 +50,67 @@ class CapturedVars extends MiniPhase with IdentityDenotTransformer: val objectRefClasses: collection.Set[Symbol] = Set(refClass(defn.ObjectClass), volatileRefClass(defn.ObjectClass)) - } private var myRefInfo: RefInfo | Null = null - private def refInfo(using Context): RefInfo = { + private def refInfo(using Context): RefInfo = if (myRefInfo == null) myRefInfo = new RefInfo() myRefInfo.uncheckedNN - } - private class CollectCaptured extends TreeTraverser { + private class CollectCaptured extends TreeTraverser: private val captured = util.HashSet[Symbol]() - def traverse(tree: Tree)(using Context) = tree match { + def traverse(tree: Tree)(using Context) = tree match case id: Ident => val sym = id.symbol - if (sym.is(Mutable, butNot = Method) && sym.owner.isTerm) { + if (sym.is(Mutable, butNot = Method) && sym.owner.isTerm) val enclMeth = ctx.owner.enclosingMethod - if (sym.enclosingMethod != enclMeth) { + if (sym.enclosingMethod != enclMeth) report.log(i"capturing $sym in ${sym.enclosingMethod}, 
referenced from $enclMeth") captured += sym - } - } case _ => traverseChildren(tree) - } - def runOver(tree: Tree)(using Context): util.ReadOnlySet[Symbol] = { + def runOver(tree: Tree)(using Context): util.ReadOnlySet[Symbol] = traverse(tree) captured - } - } - override def prepareForUnit(tree: Tree)(using Context): Context = { - val captured = atPhase(thisPhase) { + override def prepareForUnit(tree: Tree)(using Context): Context = + val captured = atPhase(thisPhase): CollectCaptured().runOver(ctx.compilationUnit.tpdTree) - } ctx.fresh.updateStore(Captured, captured) - } /** The {Volatile|}{Int|Double|...|Object}Ref class corresponding to the class `cls`, * depending on whether the reference should be @volatile */ - def refClass(cls: Symbol, isVolatile: Boolean)(using Context): Symbol = { + def refClass(cls: Symbol, isVolatile: Boolean)(using Context): Symbol = val refMap = if (isVolatile) refInfo.volatileRefClass else refInfo.refClass if (cls.isClass) refMap.getOrElse(cls, refMap(defn.ObjectClass)) else refMap(defn.ObjectClass) - } - override def prepareForValDef(vdef: ValDef)(using Context): Context = { + override def prepareForValDef(vdef: ValDef)(using Context): Context = val sym = atPhase(thisPhase)(vdef.symbol) - if (captured contains sym) { + if (captured contains sym) val newd = atPhase(thisPhase)(sym.denot).copySymDenotation( info = refClass(sym.info.classSymbol, sym.hasAnnotation(defn.VolatileAnnot)).typeRef, initFlags = sym.flags &~ Mutable) newd.removeAnnotation(defn.VolatileAnnot) newd.installAfter(thisPhase) - } ctx - } - override def transformValDef(vdef: ValDef)(using Context): Tree = { + override def transformValDef(vdef: ValDef)(using Context): Tree = val vble = vdef.symbol - if (captured.contains(vble)) { + if (captured.contains(vble)) def boxMethod(name: TermName): Tree = ref(vble.info.classSymbol.companionModule.info.member(name).symbol) cpy.ValDef(vdef)( rhs = boxMethod(nme.create).appliedTo(vdef.rhs), tpt = 
TypeTree(vble.info).withSpan(vdef.tpt.span)) - } else vdef - } - override def transformIdent(id: Ident)(using Context): Tree = { + override def transformIdent(id: Ident)(using Context): Tree = val vble = id.symbol if (captured.contains(vble)) id.select(nme.elem).ensureConforms(atPhase(thisPhase)(vble.denot).info) else id - } /** If assignment is to a boxed ref type, e.g. * diff --git a/compiler/src/dotty/tools/dotc/transform/CheckNoSuperThis.scala b/compiler/src/dotty/tools/dotc/transform/CheckNoSuperThis.scala index f43d000bbf44..f09ec458402d 100644 --- a/compiler/src/dotty/tools/dotc/transform/CheckNoSuperThis.scala +++ b/compiler/src/dotty/tools/dotc/transform/CheckNoSuperThis.scala @@ -27,24 +27,23 @@ class CheckNoSuperThis extends MiniPhase: if mdef.symbol.isClassConstructor then mdef.rhs match case Block(stats, _) => splitAtSuper(stats) match - case (Apply(_, superArgs) :: _, _) => - val cls = mdef.symbol.owner - def fail(t: Tree) = - report.error(em"super constructor cannot be passed a self reference $t unless parameter is declared by-name", t.srcPos) - for arg <- superArgs do - arg.foreachSubTree { - case t: This if t.symbol == cls => - fail(t) - case t: RefTree => t.tpe match - case tpe @ TermRef(prefix, _) - if (prefix == cls.thisType + case (Apply(_, superArgs) :: _, _) => + val cls = mdef.symbol.owner + def fail(t: Tree) = + report.error(em"super constructor cannot be passed a self reference $t unless parameter is declared by-name", t.srcPos) + for arg <- superArgs do + arg.foreachSubTree: + case t: This if t.symbol == cls => + fail(t) + case t: RefTree => t.tpe match + case tpe @ TermRef(prefix, _) + if (prefix == cls.thisType || cls.is(Module) && (prefix.termSymbol == cls.sourceModule || tpe.symbol == cls.sourceModule) - ) && !tpe.symbol.is(JavaStatic) => fail(t) + ) && !tpe.symbol.is(JavaStatic) => fail(t) + case _ => case _ => - case _ => - } - case _ => + case _ => case _ => mdef diff --git 
a/compiler/src/dotty/tools/dotc/transform/CheckReentrant.scala b/compiler/src/dotty/tools/dotc/transform/CheckReentrant.scala index b63773687f74..b99ef81cc10f 100644 --- a/compiler/src/dotty/tools/dotc/transform/CheckReentrant.scala +++ b/compiler/src/dotty/tools/dotc/transform/CheckReentrant.scala @@ -26,7 +26,7 @@ import Decorators._ * in an immutable way anyway. To do better, it would be helpful to have a type * for immutable array. */ -class CheckReentrant extends MiniPhase { +class CheckReentrant extends MiniPhase: import ast.tpd._ override def phaseName: String = CheckReentrant.name @@ -52,40 +52,33 @@ class CheckReentrant extends MiniPhase { // We would add @sharable annotations on ScalaJSVersions and // VersionChecks but we do not have control over that code - def scanning(sym: Symbol)(op: => Unit)(using Context): Unit = { + def scanning(sym: Symbol)(op: => Unit)(using Context): Unit = report.log(i"${" " * indent}scanning $sym") indent += 1 try op finally indent -= 1 - } def addVars(cls: ClassSymbol)(using Context): Unit = - if (!seen.contains(cls) && !isIgnored(cls)) { + if (!seen.contains(cls) && !isIgnored(cls)) seen += cls - scanning(cls) { + scanning(cls): for (sym <- cls.classInfo.decls) if (sym.isTerm && !sym.isSetter && !isIgnored(sym)) - if (sym.is(Mutable)) { + if (sym.is(Mutable)) report.error( em"""possible data race involving globally reachable ${sym.showLocated}: ${sym.info} | use -Ylog:checkReentrant+ to find out more about why the variable is reachable.""") shared += sym - } else if (!sym.is(Method) || sym.isOneOf(Accessor | ParamAccessor)) - scanning(sym) { + scanning(sym): sym.info.widenExpr.classSymbols.foreach(addVars) - } for (parent <- cls.parentSyms) addVars(parent.asClass) - } - } - override def transformTemplate(tree: Template)(using Context): Tree = { + override def transformTemplate(tree: Template)(using Context): Tree = if (ctx.settings.YcheckReentrant.value && tree.symbol.owner.isStaticOwner) addVars(tree.symbol.owner.asClass) 
tree - } -} object CheckReentrant: val name: String = "checkReentrant" diff --git a/compiler/src/dotty/tools/dotc/transform/CheckStatic.scala b/compiler/src/dotty/tools/dotc/transform/CheckStatic.scala index 0d5154e212ee..fb704b72d59a 100644 --- a/compiler/src/dotty/tools/dotc/transform/CheckStatic.scala +++ b/compiler/src/dotty/tools/dotc/transform/CheckStatic.scala @@ -23,18 +23,18 @@ import dotty.tools.dotc.transform.SymUtils._ * Java8 supports those, but not vars, and JavaScript does not have interfaces at all. * 6. `@static` Lazy vals are currently unsupported. */ -class CheckStatic extends MiniPhase { +class CheckStatic extends MiniPhase: import ast.tpd._ override def phaseName: String = CheckStatic.name override def description: String = CheckStatic.description - override def transformTemplate(tree: tpd.Template)(using Context): tpd.Tree = { + override def transformTemplate(tree: tpd.Template)(using Context): tpd.Tree = val defns = tree.body.collect{case t: ValOrDefDef => t} var hadNonStaticField = false for (defn <- defns) - if (defn.symbol.isScalaStatic) { + if (defn.symbol.isScalaStatic) if (!ctx.owner.isStaticOwner) report.error(StaticFieldsOnlyAllowedInObjects(defn.symbol), defn.srcPos) defn.symbol.resetFlag(JavaStatic) @@ -55,14 +55,10 @@ class CheckStatic extends MiniPhase { report.error(LazyStaticField(), defn.srcPos) else if (defn.symbol.allOverriddenSymbols.nonEmpty) report.error(StaticOverridingNonStaticMembers(), defn.srcPos) - } else hadNonStaticField = hadNonStaticField || defn.isInstanceOf[ValDef] tree - } -} -object CheckStatic { +object CheckStatic: val name: String = "checkStatic" val description: String = "check restrictions that apply to @static members" -} diff --git a/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala b/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala index bd521c8679d0..f4af5b97bc4c 100644 --- a/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala +++ 
b/compiler/src/dotty/tools/dotc/transform/CheckUnused.scala @@ -377,13 +377,12 @@ object CheckUnused: def finishAggregation(using Context)(): Unit = val unusedInThisStage = this.getUnused - this.unusedAggregate match { + this.unusedAggregate match case None => this.unusedAggregate = Some(unusedInThisStage) case Some(prevUnused) => val intersection = unusedInThisStage.warnings.intersect(prevUnused.warnings) this.unusedAggregate = Some(UnusedResult(intersection)) - } /** @@ -559,9 +558,9 @@ object CheckUnused: val warnings = List(sortedImp, sortedLocalDefs, sortedExplicitParams, sortedImplicitParams, sortedPrivateDefs, sortedPatVars, unsetLocalDefs, unsetPrivateDefs).flatten.sortBy { s => - val pos = s.pos.sourcePos - (pos.line, pos.column) - } + val pos = s.pos.sourcePos + (pos.line, pos.column) + } UnusedResult(warnings.toSet) end getUnused //============================ HELPERS ==================================== @@ -634,10 +633,9 @@ object CheckUnused: ) extension (tree: ImportSelector) - def boundTpe: Type = tree.bound match { + def boundTpe: Type = tree.bound match case untpd.TypedSplice(tree1) => tree1.tpe case _ => NoType - } extension (sym: Symbol) /** is accessible without import in current context */ @@ -720,12 +718,12 @@ object CheckUnused: (rhs match { case _: tpd.Literal => true case _ => rhs.tpe match - case ConstantType(_) => true - case tp: TermRef => + case ConstantType(_) => true + case tp: TermRef => // Detect Scala 2 SingleType - tp.underlying.classSymbol.is(Flags.Module) - case _ => - false + tp.underlying.classSymbol.is(Flags.Module) + case _ => + false }) def registerTrivial(using Context): Unit = if defdef.isTrivial then diff --git a/compiler/src/dotty/tools/dotc/transform/CollectEntryPoints.scala b/compiler/src/dotty/tools/dotc/transform/CollectEntryPoints.scala index 179625759b10..a506b9071091 100644 --- a/compiler/src/dotty/tools/dotc/transform/CollectEntryPoints.scala +++ b/compiler/src/dotty/tools/dotc/transform/CollectEntryPoints.scala 
@@ -40,13 +40,11 @@ class CollectEntryPoints extends MiniPhase: val name = sym.fullName.stripModuleClassSuffix.toString Option.when(sym.isStatic && !sym.is(Flags.Trait) && ctx.platform.hasMainMethod(sym))(name) - private def registerEntryPoint(s: String)(using Context) = { - genBCodePhase match { + private def registerEntryPoint(s: String)(using Context) = + genBCodePhase match case genBCodePhase: GenBCode => genBCodePhase.registerEntryPoint(s) case _ => - } - } object CollectEntryPoints: val name: String = "Collect entry points" diff --git a/compiler/src/dotty/tools/dotc/transform/CollectNullableFields.scala b/compiler/src/dotty/tools/dotc/transform/CollectNullableFields.scala index 7b89c8785e05..b8f939192151 100644 --- a/compiler/src/dotty/tools/dotc/transform/CollectNullableFields.scala +++ b/compiler/src/dotty/tools/dotc/transform/CollectNullableFields.scala @@ -11,10 +11,9 @@ import scala.collection.mutable import java.util.IdentityHashMap -object CollectNullableFields { +object CollectNullableFields: val name: String = "collectNullableFields" val description: String = "collect fields that can be nulled out after use in lazy initialization" -} /** Collect fields that can be nulled out after use in lazy initialization. 
* @@ -39,7 +38,7 @@ object CollectNullableFields { * - is only used in a lazy val initializer * - defined in the same class as the lazy val */ -class CollectNullableFields extends MiniPhase { +class CollectNullableFields extends MiniPhase: import tpd._ override def phaseName: String = CollectNullableFields.name @@ -56,7 +55,7 @@ class CollectNullableFields extends MiniPhase { /** Whether or not a field is nullable */ private val nullability = new mutable.LinkedHashMap[Symbol, FieldInfo] - private def recordUse(tree: Tree)(using Context): Tree = { + private def recordUse(tree: Tree)(using Context): Tree = val sym = tree.symbol val isNullablePrivateField = sym.isField && @@ -68,7 +67,7 @@ class CollectNullableFields extends MiniPhase { sym.info.widenDealias.typeSymbol.isNullableClassAfterErasure if (isNullablePrivateField) - nullability.get(sym) match { + nullability.get(sym) match case Some(Nullable(from)) if from != ctx.owner => // used in multiple lazy val initializers nullability.put(sym, NotNullable) case None => // not in the map @@ -83,10 +82,8 @@ class CollectNullableFields extends MiniPhase { // Do nothing for: // - NotNullable // - Nullable(ctx.owner) - } tree - } override def transformIdent(tree: Ident)(using Context): Tree = recordUse(tree) @@ -95,16 +92,13 @@ class CollectNullableFields extends MiniPhase { recordUse(tree) /** Map lazy values to the fields they should null after initialization. 
*/ - def lazyValNullables(using Context): IdentityHashMap[Symbol, mutable.ListBuffer[Symbol]] = { + def lazyValNullables(using Context): IdentityHashMap[Symbol, mutable.ListBuffer[Symbol]] = val result = new IdentityHashMap[Symbol, mutable.ListBuffer[Symbol]] - nullability.foreach { + nullability.foreach: case (sym, Nullable(from)) => val bldr = result.computeIfAbsent(from, _ => new mutable.ListBuffer).nn bldr += sym case _ => - } result - } -} diff --git a/compiler/src/dotty/tools/dotc/transform/CompleteJavaEnums.scala b/compiler/src/dotty/tools/dotc/transform/CompleteJavaEnums.scala index b7e8ccf4e7e1..beb602c266ce 100644 --- a/compiler/src/dotty/tools/dotc/transform/CompleteJavaEnums.scala +++ b/compiler/src/dotty/tools/dotc/transform/CompleteJavaEnums.scala @@ -16,13 +16,12 @@ import DenotTransformers._ import SymUtils._ -object CompleteJavaEnums { +object CompleteJavaEnums: val name: String = "completeJavaEnums" val description: String = "fill in constructors for Java enums" private val nameParamName: TermName = "_$name".toTermName private val ordinalParamName: TermName = "_$ordinal".toTermName -} /** For Scala enums that inherit from java.lang.Enum: * Add constructor parameters for `name` and `ordinal` to pass from each @@ -49,44 +48,40 @@ class CompleteJavaEnums extends MiniPhase with InfoTransformer { thisPhase => /** Add constructor parameters `$name: String` and `$ordinal: Int` to the end of * the last parameter list of (method- or poly-) type `tp`. 
*/ - private def addConstrParams(tp: Type)(using Context): Type = tp match { + private def addConstrParams(tp: Type)(using Context): Type = tp match case tp: PolyType => tp.derivedLambdaType(resType = addConstrParams(tp.resType)) case tp: MethodType => - tp.resType match { + tp.resType match case restpe: MethodType => tp.derivedLambdaType(resType = addConstrParams(restpe)) case _ => tp.derivedLambdaType( paramNames = tp.paramNames ++ List(nameParamName, ordinalParamName), paramInfos = tp.paramInfos ++ List(defn.StringType, defn.IntType)) - } - } /** The list of parameter definitions `$name: String, $ordinal: Int`, in given `owner` * with given flags (either `Param` or `ParamAccessor`) */ - private def addedParams(owner: Symbol, isLocal: Boolean, flag: FlagSet)(using Context): List[ValDef] = { + private def addedParams(owner: Symbol, isLocal: Boolean, flag: FlagSet)(using Context): List[ValDef] = val flags = flag | Synthetic | (if isLocal then Private | Deferred else EmptyFlags) val nameParam = newSymbol(owner, nameParamName, flags, defn.StringType, coord = owner.span) val ordinalParam = newSymbol(owner, ordinalParamName, flags, defn.IntType, coord = owner.span) List(ValDef(nameParam), ValDef(ordinalParam)) - } /** Add arguments `args` to the parent constructor application in `parents` that invokes * a constructor of `targetCls`, */ private def addEnumConstrArgs(targetCls: Symbol, parents: List[Tree], args: List[Tree])(using Context): List[Tree] = - parents.map { + parents.map: case app @ Apply(fn, args0) if fn.symbol.owner == targetCls => if args0.nonEmpty && targetCls == defn.JavaEnumClass then report.error(em"the constructor of java.lang.Enum cannot be called explicitly", app.sourcePos) cpy.Apply(app)(fn, args0 ++ args) case p => p - } /** If this is a constructor of a enum class that extends, add $name and $ordinal parameters to it. 
*/ - override def transformDefDef(tree: DefDef)(using Context): DefDef = { + override def transformDefDef(tree: DefDef)(using Context): DefDef = val sym = tree.symbol if sym.isConstructor && sym.owner.derivesFromJavaEnum then val tree1 = cpy.DefDef(tree)( @@ -96,18 +91,17 @@ class CompleteJavaEnums extends MiniPhase with InfoTransformer { thisPhase => sym.setParamssFromDefs(tree1.paramss) tree1 else tree - } /** Return a list of forwarders for enum values defined in the companion object * for java interop. */ - private def addedEnumForwarders(clazz: Symbol)(using Context): List[MemberDef] = { + private def addedEnumForwarders(clazz: Symbol)(using Context): List[MemberDef] = val moduleCls = clazz.companionClass val moduleRef = ref(clazz.companionModule) val enums = moduleCls.info.decls.filter(member => member.isAllOf(EnumValue)) for { enumValue <- enums } - yield { + yield def forwarderSym(flags: FlagSet, info: Type): Symbol { type ThisName = TermName } = val sym = newSymbol(clazz, enumValue.name.asTermName, flags, info) sym.addAnnotation(Annotations.Annotation(defn.ScalaStaticAnnot, sym.span)) @@ -120,8 +114,6 @@ class CompleteJavaEnums extends MiniPhase with InfoTransformer { thisPhase => DefDef(forwarderSym(EnumValue | Method | JavaStatic, MethodType(Nil, enumValue.info)), body) else ValDef(forwarderSym(EnumValue | JavaStatic, enumValue.info), body) - } - } private def isJavaEnumValueImpl(cls: Symbol)(using Context): Boolean = cls.isAnonymousClass @@ -157,7 +149,7 @@ class CompleteJavaEnums extends MiniPhase with InfoTransformer { thisPhase => * "same as before" * } */ - override def transformTemplate(templ: Template)(using Context): Tree = { + override def transformTemplate(templ: Template)(using Context): Tree = val cls = templ.symbol.owner if cls.derivesFromJavaEnum then registerEnumClass(cls) // invariant: class is visited before cases: see tests/pos/enum-companion-first.scala @@ -183,7 +175,6 @@ class CompleteJavaEnums extends MiniPhase with InfoTransformer 
{ thisPhase => enumCaseOrdinals.clear() // remove simple cases // invariant: companion is visited after cases templ else templ - } override def checkPostCondition(tree: Tree)(using Context): Unit = assert(enumCaseOrdinals.isEmpty, "Java based enum ordinal cache was not cleared") diff --git a/compiler/src/dotty/tools/dotc/transform/Constructors.scala b/compiler/src/dotty/tools/dotc/transform/Constructors.scala index 4dd7205e4ee0..2dc04b781d09 100644 --- a/compiler/src/dotty/tools/dotc/transform/Constructors.scala +++ b/compiler/src/dotty/tools/dotc/transform/Constructors.scala @@ -16,10 +16,9 @@ import Decorators._ import DenotTransformers._ import collection.mutable -object Constructors { +object Constructors: val name: String = "constructors" val description: String = "collect initialization code in primary constructors" -} /** This transform * - moves initializers from body to constructor. @@ -57,20 +56,19 @@ class Constructors extends MiniPhase with IdentityDenotTransformer { thisPhase = // 3. It is accessed on an object other than `this` // 4. It is a mutable parameter accessor // 5. 
It is has a wildcard initializer `_` - private def markUsedPrivateSymbols(tree: RefTree)(using Context): Unit = { + private def markUsedPrivateSymbols(tree: RefTree)(using Context): Unit = val sym = tree.symbol def retain() = retainedPrivateVals.add(sym) - if (sym.exists && sym.owner.isClass && mightBeDropped(sym)) { + if (sym.exists && sym.owner.isClass && mightBeDropped(sym)) val owner = sym.owner.asClass - tree match { + tree match case Ident(_) | Select(This(_), _) => - def inConstructor = { + def inConstructor = val method = ctx.owner.enclosingMethod method.isPrimaryConstructor && ctx.owner.enclosingClass == owner - } if (inConstructor && (sym.is(ParamAccessor) || seenPrivateVals.contains(sym))) { // used inside constructor, accessed on this, @@ -78,40 +76,32 @@ class Constructors extends MiniPhase with IdentityDenotTransformer { thisPhase = } else retain() case _ => retain() - } - } - } - override def transformIdent(tree: tpd.Ident)(using Context): tpd.Tree = { + override def transformIdent(tree: tpd.Ident)(using Context): tpd.Tree = markUsedPrivateSymbols(tree) tree - } - override def transformSelect(tree: tpd.Select)(using Context): tpd.Tree = { + override def transformSelect(tree: tpd.Select)(using Context): tpd.Tree = markUsedPrivateSymbols(tree) tree - } - override def transformValDef(tree: tpd.ValDef)(using Context): tpd.Tree = { + override def transformValDef(tree: tpd.ValDef)(using Context): tpd.Tree = if (mightBeDropped(tree.symbol)) seenPrivateVals += tree.symbol tree - } /** All initializers for non-lazy fields should be moved into constructor. * All non-abstract methods should be implemented (this is assured for constructors * in this phase and for other methods in memoize). 
*/ - override def checkPostCondition(tree: tpd.Tree)(using Context): Unit = { + override def checkPostCondition(tree: tpd.Tree)(using Context): Unit = def emptyRhsOK(sym: Symbol) = sym.isOneOf(DeferredOrLazy) || sym.isConstructor && sym.owner.isAllOf(NoInitsTrait) - tree match { + tree match case tree: ValDef if tree.symbol.exists && tree.symbol.owner.isClass && !tree.symbol.is(Lazy) && !tree.symbol.hasAnnotation(defn.ScalaStaticAnnot) => assert(tree.rhs.isEmpty, i"$tree: initializer should be moved to constructors") case tree: DefDef if !emptyRhsOK(tree.symbol) => assert(!tree.rhs.isEmpty, i"unimplemented: $tree") case _ => - } - } /** @return true if after ExplicitOuter, all references from this tree go via an * outer link, so no parameter accessors need to be rewired to parameters @@ -127,7 +117,7 @@ class Constructors extends MiniPhase with IdentityDenotTransformer { thisPhase = private final val MutableParamAccessor = Mutable | ParamAccessor - override def transformTemplate(tree: Template)(using Context): Tree = { + override def transformTemplate(tree: Template)(using Context): Tree = val cls = ctx.owner.asClass val constr @ DefDef(nme.CONSTRUCTOR, (vparams: List[ValDef] @unchecked) :: Nil, _, EmptyTree) = tree.constr: @unchecked @@ -136,10 +126,9 @@ class Constructors extends MiniPhase with IdentityDenotTransformer { thisPhase = // for any outer parameters, which are last in the sequence of original // parameter accessors but come first in the constructor parameter list. 
val accessors = cls.paramGetters - val vparamsWithOuterLast = vparams match { + val vparamsWithOuterLast = vparams match case vparam :: rest if vparam.name == nme.OUTER => rest ::: vparam :: Nil case _ => vparams - } val paramSyms = vparamsWithOuterLast map (_.symbol) // Adjustments performed when moving code into the constructor: @@ -148,9 +137,9 @@ class Constructors extends MiniPhase with IdentityDenotTransformer { thisPhase = // (Mutable parameters should be replaced only during the super call) // (2) If the parameter accessor reference was to an alias getter, // drop the () when replacing by the parameter. - object intoConstr extends TreeMap { + object intoConstr extends TreeMap: private var inSuperCall = false - override def transform(tree: Tree)(using Context): Tree = tree match { + override def transform(tree: Tree)(using Context): Tree = tree match case Ident(_) | Select(This(_), _) => var sym = tree.symbol def isOverridableSelect = tree.isInstanceOf[Select] && !sym.isEffectivelyFinal @@ -176,12 +165,10 @@ class Constructors extends MiniPhase with IdentityDenotTransformer { thisPhase = else cpy.Apply(tree)(fn1, Nil) case _ => if (noDirectRefsFrom(tree)) tree else super.transform(tree) - } def apply(tree: Tree, prevOwner: Symbol)(using Context): Tree = inSuperCall = isSuperConstrCall(tree) transform(tree).changeOwnerAfter(prevOwner, constr.symbol, thisPhase) - } def isRetained(acc: Symbol) = !mightBeDropped(acc) || retainedPrivateVals(acc) @@ -189,8 +176,8 @@ class Constructors extends MiniPhase with IdentityDenotTransformer { thisPhase = val constrStats, clsStats = new mutable.ListBuffer[Tree] /** Map outer getters $outer and outer accessors $A$B$$$outer to the given outer parameter. 
*/ - def mapOuter(outerParam: Symbol) = new TreeMap { - override def transform(tree: Tree)(using Context) = tree match { + def mapOuter(outerParam: Symbol) = new TreeMap: + override def transform(tree: Tree)(using Context) = tree match case Apply(fn, Nil) if (fn.symbol.is(OuterAccessor) || fn.symbol.isGetter && fn.symbol.name == nme.OUTER @@ -201,30 +188,27 @@ class Constructors extends MiniPhase with IdentityDenotTransformer { thisPhase = ref(outerParam) case _ => super.transform(tree) - } - } val dropped = mutable.Set[Symbol]() // Split class body into statements that go into constructor and // definitions that are kept as members of the class. - def splitStats(stats: List[Tree]): Unit = stats match { + def splitStats(stats: List[Tree]): Unit = stats match case stat :: stats1 => - stat match { + stat match case stat @ ValDef(name, tpt, _) if !stat.symbol.is(Lazy) && !stat.symbol.hasAnnotation(defn.ScalaStaticAnnot) => val sym = stat.symbol - if (isRetained(sym)) { + if (isRetained(sym)) if (!stat.rhs.isEmpty && !isWildcardArg(stat.rhs)) constrStats += Assign(ref(sym), intoConstr(stat.rhs, sym)).withSpan(stat.span) clsStats += cpy.ValDef(stat)(rhs = EmptyTree) - } - else if (!stat.rhs.isEmpty) { + else if (!stat.rhs.isEmpty) dropped += sym sym.copySymDenotation( initFlags = sym.flags &~ Private, owner = constr.symbol).installAfter(thisPhase) constrStats += intoConstr(stat, sym) - } else + else dropped += sym case stat @ DefDef(name, _, tpt, _) if stat.symbol.isGetter && !stat.symbol.is(Lazy) => val sym = stat.symbol @@ -265,10 +249,8 @@ class Constructors extends MiniPhase with IdentityDenotTransformer { thisPhase = clsStats += stat case _ => constrStats += intoConstr(stat, tree.symbol) - } splitStats(stats1) case Nil => - } /** Check that we do not have both a private field with name `x` and a private field * with name `FieldName(x)`. 
These will map to the same JVM name and therefore cause @@ -294,71 +276,61 @@ class Constructors extends MiniPhase with IdentityDenotTransformer { thisPhase = // The initializers for the retained accessors */ val copyParams = accessors flatMap { acc => - if (!isRetained(acc)) { + if (!isRetained(acc)) dropped += acc Nil - } - else if (!isRetained(acc.field)) { // It may happen for unit fields, tests/run/i6987.scala + else if (!isRetained(acc.field)) // It may happen for unit fields, tests/run/i6987.scala dropped += acc.field Nil - } - else { + else val param = acc.subst(accessors, paramSyms) if (param.hasAnnotation(defn.ConstructorOnlyAnnot)) report.error(em"${acc.name} is marked `@constructorOnly` but it is retained as a field in ${acc.owner}", acc.srcPos) val target = if (acc.is(Method)) acc.field else acc if (!target.exists) Nil // this case arises when the parameter accessor is an alias - else { + else val assigns = Assign(ref(target), ref(param)).withSpan(tree.span) :: Nil if (acc.name != nme.OUTER) assigns - else { + else // insert test: if ($outer eq null) throw new NullPointerException val nullTest = If(ref(param).select(defn.Object_eq).appliedTo(nullLiteral), Throw(New(defn.NullPointerExceptionClass.typeRef, Nil)), unitLiteral) nullTest :: assigns - } - } - } } // Drop accessors that are not retained from class scope - if (dropped.nonEmpty) { + if (dropped.nonEmpty) val clsInfo = cls.classInfo val decls = clsInfo.decls.filteredScope(!dropped.contains(_)) val clsInfo2 = clsInfo.derivedClassInfo(decls = decls) cls.copySymDenotation(info = clsInfo2).installAfter(thisPhase) // TODO: this happens to work only because Constructors is the last phase in group - } val (superCalls, followConstrStats) = splitAtSuper(constrStats.toList) - val mappedSuperCalls = vparams match { + val mappedSuperCalls = vparams match case (outerParam @ ValDef(nme.OUTER, _, _)) :: _ => superCalls.map(mapOuter(outerParam.symbol).transform) case _ => superCalls - } // Lazy Vals may decide 
to create an eager val instead of a lazy val // this val should be assigned before constructor body code starts running - val (lazyAssignments, stats) = followConstrStats.partition { + val (lazyAssignments, stats) = followConstrStats.partition: case Assign(l, r) if l.symbol.name.is(NameKinds.LazyLocalName) => true case _ => false - } val finalConstrStats = copyParams ::: mappedSuperCalls ::: lazyAssignments ::: stats val expandedConstr = - if (cls.isAllOf(NoInitsTrait)) { + if (cls.isAllOf(NoInitsTrait)) assert(finalConstrStats.isEmpty || { import dotty.tools.dotc.transform.sjs.JSSymUtils._ ctx.settings.scalajs.value && cls.isJSType }) constr - } else cpy.DefDef(constr)(rhs = Block(finalConstrStats, unitLiteral)) cpy.Template(tree)(constr = expandedConstr, body = clsStats.toList) - } } diff --git a/compiler/src/dotty/tools/dotc/transform/CookComments.scala b/compiler/src/dotty/tools/dotc/transform/CookComments.scala index 27f34891fc2c..b31f51909cb0 100644 --- a/compiler/src/dotty/tools/dotc/transform/CookComments.scala +++ b/compiler/src/dotty/tools/dotc/transform/CookComments.scala @@ -5,14 +5,14 @@ import dotty.tools.dotc.core.Contexts._ import dotty.tools.dotc.core.ContextOps._ import dotty.tools.dotc.typer.Docstrings -class CookComments extends MegaPhase.MiniPhase { +class CookComments extends MegaPhase.MiniPhase: override def phaseName: String = CookComments.name override def description: String = CookComments.description - override def transformTypeDef(tree: tpd.TypeDef)(using Context): tpd.Tree = { - if (ctx.settings.YcookComments.value && tree.isClassDef) { + override def transformTypeDef(tree: tpd.TypeDef)(using Context): tpd.Tree = + if (ctx.settings.YcookComments.value && tree.isClassDef) val cls = tree.symbol val cookingCtx = ctx.localContext(tree, cls).setNewScope val template = tree.rhs.asInstanceOf[tpd.Template] @@ -23,11 +23,8 @@ class CookComments extends MegaPhase.MiniPhase { } Docstrings.cookComment(cls, cls)(using cookingCtx) - } tree - } -} 
object CookComments: val name = "cookComments" diff --git a/compiler/src/dotty/tools/dotc/transform/CountOuterAccesses.scala b/compiler/src/dotty/tools/dotc/transform/CountOuterAccesses.scala index 91b5bc6a3de4..0c579376d4b8 100644 --- a/compiler/src/dotty/tools/dotc/transform/CountOuterAccesses.scala +++ b/compiler/src/dotty/tools/dotc/transform/CountOuterAccesses.scala @@ -43,9 +43,8 @@ class CountOuterAccesses extends MiniPhase: // LambdaLift can create outer paths. These need to be known in this phase. /** The number of times an outer accessor that might be dropped is accessed */ - val outerAccessCount = new mutable.HashMap[Symbol, Int] { + val outerAccessCount = new mutable.HashMap[Symbol, Int]: override def default(s: Symbol): Int = 0 - } private def markAccessed(tree: RefTree)(using Context): Tree = val sym = tree.symbol diff --git a/compiler/src/dotty/tools/dotc/transform/CtxLazy.scala b/compiler/src/dotty/tools/dotc/transform/CtxLazy.scala index 808cf928ecc2..a55a9736591b 100644 --- a/compiler/src/dotty/tools/dotc/transform/CtxLazy.scala +++ b/compiler/src/dotty/tools/dotc/transform/CtxLazy.scala @@ -11,14 +11,11 @@ import core.Contexts._ * A typical use case is a lazy val in a phase object which exists once per root context where * the expression intiializing the lazy val depends only on the root context, but not any changes afterwards. 
*/ -class CtxLazy[T](expr: Context ?=> T) { +class CtxLazy[T](expr: Context ?=> T): private var myValue: T = _ private var forced = false - def apply()(using Context): T = { - if (!forced) { + def apply()(using Context): T = + if (!forced) myValue = expr forced = true - } myValue - } -} diff --git a/compiler/src/dotty/tools/dotc/transform/Dependencies.scala b/compiler/src/dotty/tools/dotc/transform/Dependencies.scala index 0043c43073ed..265371a10658 100644 --- a/compiler/src/dotty/tools/dotc/transform/Dependencies.scala +++ b/compiler/src/dotty/tools/dotc/transform/Dependencies.scala @@ -165,11 +165,10 @@ abstract class Dependencies(root: ast.tpd.Tree, @constructorOnly rootContext: Co println(i"error lambda lifting ${ctx.compilationUnit}: $sym is not visible from $enclosure") throw ex - private def markCalled(callee: Symbol, caller: Symbol)(using Context): Unit = { + private def markCalled(callee: Symbol, caller: Symbol)(using Context): Unit = report.debuglog(i"mark called: $callee of ${callee.owner} is called by $caller in ${caller.owner}") assert(isLocal(callee)) symSet(called, caller) += callee - } protected def process(tree: Tree)(using Context) = val sym = tree.symbol @@ -279,11 +278,10 @@ abstract class Dependencies(root: ast.tpd.Tree, @constructorOnly rootContext: Co do () // initialization - inContext(rootContext) { + inContext(rootContext): CollectDependencies().traverse(root) computeFreeVars() computeLogicOwners() - } object Dependencies: private class NoPath extends Exception end Dependencies diff --git a/compiler/src/dotty/tools/dotc/transform/ElimByName.scala b/compiler/src/dotty/tools/dotc/transform/ElimByName.scala index 151e841f0e48..3e22234eefa5 100644 --- a/compiler/src/dotty/tools/dotc/transform/ElimByName.scala +++ b/compiler/src/dotty/tools/dotc/transform/ElimByName.scala @@ -76,7 +76,7 @@ class ElimByName extends MiniPhase, InfoTransformer: private def exprBecomesFunction(symd: SymDenotation)(using Context): Boolean = symd.is(Param) || 
symd.is(ParamAccessor, butNot = Method) - def transformInfo(tp: Type, sym: Symbol)(using Context): Type = tp match { + def transformInfo(tp: Type, sym: Symbol)(using Context): Type = tp match case ExprType(rt) if exprBecomesFunction(sym) => defn.ByNameFunction(rt) case tp: MethodType => @@ -92,7 +92,6 @@ class ElimByName extends MiniPhase, InfoTransformer: case tp: PolyType => tp.derivedLambdaType(resType = transformInfo(tp.resType, sym)) case _ => tp - } override def infoMayChange(sym: Symbol)(using Context): Boolean = sym.is(Method) || exprBecomesFunction(sym) @@ -121,16 +120,15 @@ class ElimByName extends MiniPhase, InfoTransformer: override def transformSelect(tree: Select)(using Context): Tree = applyIfFunction(tree) - override def transformTypeApply(tree: TypeApply)(using Context): Tree = tree match { + override def transformTypeApply(tree: TypeApply)(using Context): Tree = tree match case TypeApply(Select(_, nme.asInstanceOf_), arg :: Nil) => // tree might be of form e.asInstanceOf[x.type] where x becomes a function. 
// See pos/t296.scala applyIfFunction(tree) case _ => tree - } override def transformApply(tree: Apply)(using Context): Tree = - trace(s"transforming ${tree.show} at phase ${ctx.phase}", show = true) { + trace(s"transforming ${tree.show} at phase ${ctx.phase}", show = true): def transformArg(arg: Tree, formal: Type): Tree = formal match case defn.ByNameFunction(formalResult) => @@ -150,14 +148,12 @@ class ElimByName extends MiniPhase, InfoTransformer: val mt @ MethodType(_) = tree.fun.tpe.widen: @unchecked val args1 = tree.args.zipWithConserve(mt.paramInfos)(transformArg) cpy.Apply(tree)(tree.fun, args1) - } override def transformValDef(tree: ValDef)(using Context): Tree = - atPhase(next) { + atPhase(next): if exprBecomesFunction(tree.symbol) then cpy.ValDef(tree)(tpt = tree.tpt.withType(tree.symbol.info)) else tree - } object ElimByName: val name: String = "elimByName" diff --git a/compiler/src/dotty/tools/dotc/transform/ElimErasedValueType.scala b/compiler/src/dotty/tools/dotc/transform/ElimErasedValueType.scala index 503561915040..9d969f4e8641 100644 --- a/compiler/src/dotty/tools/dotc/transform/ElimErasedValueType.scala +++ b/compiler/src/dotty/tools/dotc/transform/ElimErasedValueType.scala @@ -11,11 +11,11 @@ import TypeErasure.ErasedValueType, ValueClasses._ import reporting._ import NameKinds.SuperAccessorName -object ElimErasedValueType { +object ElimErasedValueType: val name: String = "elimErasedValueType" val description: String = "expand erased value types to their underlying implmementation types" - def elimEVT(tp: Type)(using Context): Type = tp match { + def elimEVT(tp: Type)(using Context): Type = tp match case ErasedValueType(_, underlying) => elimEVT(underlying) case tp: MethodType => @@ -24,8 +24,6 @@ object ElimErasedValueType { tp.derivedLambdaType(tp.paramNames, paramTypes, retType) case _ => tp - } -} /** This phase erases ErasedValueType to their underlying type. 
* It also removes the synthetic cast methods u2evt$ and evt2u$ which are @@ -45,9 +43,9 @@ class ElimErasedValueType extends MiniPhase with InfoTransformer { thisPhase => override def runsAfter: Set[String] = Set(Erasure.name) - def transformInfo(tp: Type, sym: Symbol)(using Context): Type = sym match { + def transformInfo(tp: Type, sym: Symbol)(using Context): Type = sym match case sym: ClassSymbol if sym.is(ModuleClass) => - sym.companionClass match { + sym.companionClass match case origClass: ClassSymbol if isDerivedValueClass(origClass) => val cinfo = tp.asInstanceOf[ClassInfo] val decls1 = cinfo.decls.cloneScope @@ -58,15 +56,13 @@ class ElimErasedValueType extends MiniPhase with InfoTransformer { thisPhase => cinfo.derivedClassInfo(decls = decls1) case _ => tp - } case _ => elimEVT(tp) - } def transformTypeOfTree(tree: Tree)(using Context): Tree = tree.withType(elimEVT(tree.tpe)) - override def transformApply(tree: Apply)(using Context): Tree = { + override def transformApply(tree: Apply)(using Context): Tree = val Apply(fun, args) = tree // The casts to and from ErasedValueType are no longer needed once ErasedValueType @@ -77,14 +73,13 @@ class ElimErasedValueType extends MiniPhase with InfoTransformer { thisPhase => else tree transformTypeOfTree(t) - } /** Check that we don't have pairs of methods that override each other after * this phase, yet do not have matching types before erasure. */ - private def checkNoClashes(root: Symbol)(using Context) = { - val opc = atPhase(thisPhase) { - new OverridingPairs.Cursor(root) { + private def checkNoClashes(root: Symbol)(using Context) = + val opc = atPhase(thisPhase): + new OverridingPairs.Cursor(root): override def isSubParent(parent: Symbol, bc: Symbol)(using Context) = // Need to compute suparents before erasure to not filter out parents // that are bypassed with different types. See neg/11719a.scala. 
@@ -93,10 +88,8 @@ class ElimErasedValueType extends MiniPhase with InfoTransformer { thisPhase => !sym.is(Method) || sym.is(Bridge) || super.exclude(sym) override def matches(sym1: Symbol, sym2: Symbol) = sym1.signature == sym2.signature - } - } - def checkNoConflict(sym1: Symbol, sym2: Symbol, info: Type)(using Context): Unit = { + def checkNoConflict(sym1: Symbol, sym2: Symbol, info: Type)(using Context): Unit = val site = root.thisType val info1 = site.memberInfo(sym1) val info2 = site.memberInfo(sym2) @@ -105,7 +98,7 @@ class ElimErasedValueType extends MiniPhase with InfoTransformer { thisPhase => def bothPolyApply = sym1.name == nme.apply && (sym1.owner.derivesFrom(defn.PolyFunctionClass) || - sym2.owner.derivesFrom(defn.PolyFunctionClass)) + sym2.owner.derivesFrom(defn.PolyFunctionClass)) // super-accessors start as private, and their expanded name can clash after // erasure. TODO: Verify that this is OK. @@ -113,8 +106,7 @@ class ElimErasedValueType extends MiniPhase with InfoTransformer { thisPhase => if (sym1.name != sym2.name && !bothSuperAccessors || !info1.matchesLoosely(info2) && !bothPolyApply) report.error(DoubleDefinition(sym1, sym2, root), root.srcPos) - } - while (opc.hasNext) { + while (opc.hasNext) val sym1 = opc.overriding val sym2 = opc.overridden // Do the test at the earliest phase where both symbols existed. 
@@ -122,13 +114,10 @@ class ElimErasedValueType extends MiniPhase with InfoTransformer { thisPhase => sym1.originDenotation.validFor.firstPhaseId max sym2.originDenotation.validFor.firstPhaseId atPhase(elimByNamePhase.next)(checkNoConflict(sym1, sym2, sym1.info)) opc.next() - } - } - override def prepareForTypeDef(tree: TypeDef)(using Context): Context = { + override def prepareForTypeDef(tree: TypeDef)(using Context): Context = checkNoClashes(tree.symbol) ctx - } override def transformInlined(tree: Inlined)(using Context): Tree = transformTypeOfTree(tree) diff --git a/compiler/src/dotty/tools/dotc/transform/ElimOpaque.scala b/compiler/src/dotty/tools/dotc/transform/ElimOpaque.scala index 2f55826ec2a3..3d938e6d02e4 100644 --- a/compiler/src/dotty/tools/dotc/transform/ElimOpaque.scala +++ b/compiler/src/dotty/tools/dotc/transform/ElimOpaque.scala @@ -13,13 +13,12 @@ import SymDenotations.SymDenotation import DenotTransformers._ import Names._ -object ElimOpaque { +object ElimOpaque: val name: String = "elimOpaque" val description: String = "turn opaque into normal aliases" -} /** Rewrites opaque type aliases to normal alias types */ -class ElimOpaque extends MiniPhase with DenotTransformer { +class ElimOpaque extends MiniPhase with DenotTransformer: thisPhase => import ast.tpd._ @@ -33,19 +32,18 @@ class ElimOpaque extends MiniPhase with DenotTransformer { // base types of opaque aliases change override def changesBaseTypes = true - def transform(ref: SingleDenotation)(using Context): SingleDenotation = { + def transform(ref: SingleDenotation)(using Context): SingleDenotation = val sym = ref.symbol - ref match { + ref match case ref: SymDenotation if sym.isOpaqueAlias => ref.copySymDenotation( info = TypeAlias(ref.opaqueAlias), initFlags = ref.flags &~ (Opaque | Deferred)) case ref: SymDenotation if sym.containsOpaques => - def stripOpaqueRefinements(tp: Type): Type = tp match { + def stripOpaqueRefinements(tp: Type): Type = tp match case RefinedType(parent, rname, 
TypeAlias(_)) if ref.info.decl(rname).symbol.isOpaqueAlias => stripOpaqueRefinements(parent) case _ => tp - } val cinfo = sym.asClass.classInfo val strippedSelfType = stripOpaqueRefinements(cinfo.selfType) ref.copySymDenotation( @@ -55,8 +53,6 @@ class ElimOpaque extends MiniPhase with DenotTransformer { ref.derivedSingleDenotation(sym, TypeAlias(sym.opaqueAlias.asSeenFrom(ref.prefix, sym.owner))) case _ => ref - } - } /** Resolve overloading of `==` and `!=` methods with the representation * types in order to avoid boxing. @@ -72,4 +68,3 @@ class ElimOpaque extends MiniPhase with DenotTransformer { tree else tree -} diff --git a/compiler/src/dotty/tools/dotc/transform/ElimOuterSelect.scala b/compiler/src/dotty/tools/dotc/transform/ElimOuterSelect.scala index 3ddc8b614bae..869aaf438ddd 100644 --- a/compiler/src/dotty/tools/dotc/transform/ElimOuterSelect.scala +++ b/compiler/src/dotty/tools/dotc/transform/ElimOuterSelect.scala @@ -10,7 +10,7 @@ import NameKinds.OuterSelectName /** This phase rewrites outer selects `E.n_` which were introduced by * inlining to outer paths. */ -class ElimOuterSelect extends MiniPhase { +class ElimOuterSelect extends MiniPhase: import ast.tpd._ override def phaseName: String = ElimOuterSelect.name @@ -25,13 +25,11 @@ class ElimOuterSelect extends MiniPhase { * length `n`. 
*/ override def transformSelect(tree: Select)(using Context): Tree = - tree.name match { + tree.name match case OuterSelectName(_, nhops) => val SkolemType(tp) = tree.tpe: @unchecked ExplicitOuter.outer.path(start = tree.qualifier, count = nhops).ensureConforms(tp) case _ => tree - } -} object ElimOuterSelect: val name: String = "elimOuterSelect" diff --git a/compiler/src/dotty/tools/dotc/transform/ElimPackagePrefixes.scala b/compiler/src/dotty/tools/dotc/transform/ElimPackagePrefixes.scala index 83349f1f6199..8171adcf3f41 100644 --- a/compiler/src/dotty/tools/dotc/transform/ElimPackagePrefixes.scala +++ b/compiler/src/dotty/tools/dotc/transform/ElimPackagePrefixes.scala @@ -10,7 +10,7 @@ import MegaPhase.MiniPhase /** Eliminates syntactic references to package terms as prefixes of classes, so that there's no chance * they accidentally end up in the backend. */ -class ElimPackagePrefixes extends MiniPhase { +class ElimPackagePrefixes extends MiniPhase: override def phaseName: String = ElimPackagePrefixes.name @@ -19,18 +19,15 @@ class ElimPackagePrefixes extends MiniPhase { override def transformSelect(tree: Select)(using Context): Tree = if (isPackageClassRef(tree)) Ident(tree.tpe.asInstanceOf[TypeRef]) else tree - override def checkPostCondition(tree: Tree)(using Context): Unit = tree match { + override def checkPostCondition(tree: Tree)(using Context): Unit = tree match case tree: Select => assert(!isPackageClassRef(tree), i"Unexpected reference to package in $tree") case _ => - } /** Is the given tree a reference to a type in a package? 
*/ - private def isPackageClassRef(tree: Select)(using Context): Boolean = tree.tpe match { + private def isPackageClassRef(tree: Select)(using Context): Boolean = tree.tpe match case TypeRef(prefix, _) => prefix.termSymbol.is(Package) case _ => false - } -} object ElimPackagePrefixes: val name: String = "elimPackagePrefixes" diff --git a/compiler/src/dotty/tools/dotc/transform/ElimPolyFunction.scala b/compiler/src/dotty/tools/dotc/transform/ElimPolyFunction.scala index 756ddd9bf0eb..72b4b963942c 100644 --- a/compiler/src/dotty/tools/dotc/transform/ElimPolyFunction.scala +++ b/compiler/src/dotty/tools/dotc/transform/ElimPolyFunction.scala @@ -18,7 +18,7 @@ import TypeErasure.ErasedValueType, ValueClasses._ * def apply(x_1: P_1, ..., x_N: P_N): R = rhs * } */ -class ElimPolyFunction extends MiniPhase with DenotTransformer { +class ElimPolyFunction extends MiniPhase with DenotTransformer: import tpd._ @@ -30,7 +30,7 @@ class ElimPolyFunction extends MiniPhase with DenotTransformer { override def changesParents: Boolean = true // Replaces PolyFunction by FunctionN - override def transform(ref: SingleDenotation)(using Context) = ref match { + override def transform(ref: SingleDenotation)(using Context) = ref match case ref: ClassDenotation if ref.symbol != defn.PolyFunctionClass && ref.derivesFrom(defn.PolyFunctionClass) => val cinfo = ref.classInfo val newParent = functionTypeOfPoly(cinfo) @@ -43,29 +43,23 @@ class ElimPolyFunction extends MiniPhase with DenotTransformer { ref.copySymDenotation(info = cinfo.derivedClassInfo(declaredParents = newParents)) case _ => ref - } - def functionTypeOfPoly(cinfo: ClassInfo)(using Context): Type = { + def functionTypeOfPoly(cinfo: ClassInfo)(using Context): Type = val applyMeth = cinfo.decls.lookup(nme.apply).info val arity = applyMeth.paramNamess.head.length defn.FunctionType(arity) - } - override def transformTemplate(tree: Template)(using Context): Tree = { + override def transformTemplate(tree: Template)(using Context): Tree 
= val newParents = tree.parents.mapconserve(parent => - if (parent.tpe.typeSymbol == defn.PolyFunctionClass) { + if (parent.tpe.typeSymbol == defn.PolyFunctionClass) val cinfo = tree.symbol.owner.asClass.classInfo tpd.TypeTree(functionTypeOfPoly(cinfo)) - } else parent ) cpy.Template(tree)(parents = newParents) - } -} -object ElimPolyFunction { +object ElimPolyFunction: val name: String = "elimPolyFunction" val description: String = "rewrite PolyFunction subclasses to FunctionN subclasses" -} diff --git a/compiler/src/dotty/tools/dotc/transform/ElimRepeated.scala b/compiler/src/dotty/tools/dotc/transform/ElimRepeated.scala index 359b882ef26b..1e1b015887f4 100644 --- a/compiler/src/dotty/tools/dotc/transform/ElimRepeated.scala +++ b/compiler/src/dotty/tools/dotc/transform/ElimRepeated.scala @@ -14,10 +14,9 @@ import Denotations._, SymDenotations._ import DenotTransformers._ import NullOpsDecorator._ -object ElimRepeated { +object ElimRepeated: val name: String = "elimRepeated" val description: String = "rewrite vararg parameters and arguments" -} /** A transformer that eliminates repeated parameters (T*) from all types, replacing * them with Seq or Array types and adapting repeated arguments to conform to @@ -89,12 +88,11 @@ class ElimRepeated extends MiniPhase with InfoTransformer { thisPhase => private def overridesJava(sym: Symbol)(using Context) = sym.memberCanMatchInheritedSymbols && sym.owner.info.baseClasses.drop(1).exists { bc => - bc.is(JavaDefined) && { + bc.is(JavaDefined) `&&`: val other = bc.info.nonPrivateDecl(sym.name) other.hasAltWith { alt => sym.owner.thisType.memberInfo(alt.symbol).matchesLoosely(sym.info) } - } } private def hasVarargsAnnotation(sym: Symbol)(using Context) = sym.hasAnnotation(defn.VarargsAnnot) @@ -191,7 +189,7 @@ class ElimRepeated extends MiniPhase with InfoTransformer { thisPhase => ref(sym.termRef) .appliedToArgss(init) .appliedToTermArgs(last :+ wrapArray(vararg, elemtp)) - }) + }) Thicket(tree, forwarderDef) else tree @@ 
-248,7 +246,7 @@ class ElimRepeated extends MiniPhase with InfoTransformer { thisPhase => if isBridge then flags | Artifact else if hasAnnotation && !parentHasAnnotation then flags &~ Override else flags, - info = toJavaVarArgs(original.info) + info = toJavaVarArgs(original.info) ).asTerm // Find methods that would conflict with the forwarder if the latter existed. diff --git a/compiler/src/dotty/tools/dotc/transform/ElimStaticThis.scala b/compiler/src/dotty/tools/dotc/transform/ElimStaticThis.scala index 02612253c735..84893d10a5f8 100644 --- a/compiler/src/dotty/tools/dotc/transform/ElimStaticThis.scala +++ b/compiler/src/dotty/tools/dotc/transform/ElimStaticThis.scala @@ -11,7 +11,7 @@ import dotty.tools.dotc.core.Types.{ThisType, TermRef} /** Replace This references to module classes in static methods by global identifiers to the * corresponding modules. */ -class ElimStaticThis extends MiniPhase { +class ElimStaticThis extends MiniPhase: import ast.tpd._ override def phaseName: String = ElimStaticThis.name @@ -19,24 +19,21 @@ class ElimStaticThis extends MiniPhase { override def description: String = ElimStaticThis.description override def transformThis(tree: This)(using Context): Tree = - if (!tree.symbol.is(Package) && ctx.owner.enclosingMethod.is(JavaStatic)) { + if (!tree.symbol.is(Package) && ctx.owner.enclosingMethod.is(JavaStatic)) assert(tree.symbol.is(ModuleClass)) ref(tree.symbol.sourceModule) - } else tree override def transformIdent(tree: tpd.Ident)(using Context): tpd.Tree = if (ctx.owner.enclosingMethod.is(JavaStatic)) - tree.tpe match { + tree.tpe match case TermRef(thiz: ThisType, _) if thiz.cls.is(ModuleClass, JavaDefined) => ref(thiz.cls.sourceModule).select(tree.symbol) case TermRef(thiz: ThisType, _) => assert(tree.symbol.is(Flags.JavaStatic) || thiz.cls.is(JavaDefined)) tree case _ => tree - } else tree -} object ElimStaticThis: val name: String = "elimStaticThis" diff --git a/compiler/src/dotty/tools/dotc/transform/Erasure.scala 
b/compiler/src/dotty/tools/dotc/transform/Erasure.scala index f068a48cdba4..f851582d9923 100644 --- a/compiler/src/dotty/tools/dotc/transform/Erasure.scala +++ b/compiler/src/dotty/tools/dotc/transform/Erasure.scala @@ -36,7 +36,7 @@ import core.Mode import util.Property import reporting._ -class Erasure extends Phase with DenotTransformer { +class Erasure extends Phase with DenotTransformer: override def phaseName: String = Erasure.name @@ -48,15 +48,13 @@ class Erasure extends Phase with DenotTransformer { override def changesMembers: Boolean = true // the phase adds bridges override def changesParents: Boolean = true // the phase drops Any - def transform(ref: SingleDenotation)(using Context): SingleDenotation = ref match { + def transform(ref: SingleDenotation)(using Context): SingleDenotation = ref match case ref: SymDenotation => def isCompacted(symd: SymDenotation) = - symd.isAnonymousFunction && { - atPhase(ctx.phase.next)(symd.info) match { + symd.isAnonymousFunction `&&`: + atPhase(ctx.phase.next)(symd.info) match case MethodType(nme.ALLARGS :: Nil) => true case _ => false - } - } def erasedName = if ref.is(Flags.Method) @@ -71,7 +69,7 @@ class Erasure extends Phase with DenotTransformer { ref.targetName assert(ctx.phase == this, s"transforming $ref at ${ctx.phase}") - if (ref.symbol eq defn.ObjectClass) { + if (ref.symbol eq defn.ObjectClass) // After erasure, all former Any members are now Object members val ClassInfo(pre, _, ps, decls, selfInfo) = ref.info: @unchecked val extendedScope = decls.cloneScope @@ -81,8 +79,7 @@ class Erasure extends Phase with DenotTransformer { info = transformInfo(ref.symbol, ClassInfo(pre, defn.ObjectClass, ps, extendedScope, selfInfo)) ) - } - else { + else val oldSymbol = ref.symbol val newSymbol = if ((oldSymbol.owner eq defn.AnyClass) && oldSymbol.isConstructor) then @@ -129,20 +126,17 @@ class Erasure extends Phase with DenotTransformer { initFlags = newFlags, info = newInfo, annotations = newAnnotations) - } case 
ref: JointRefDenotation => new UniqueRefDenotation( ref.symbol, transformInfo(ref.symbol, ref.symbol.info), ref.validFor, ref.prefix) case _ => ref.derivedSingleDenotation(ref.symbol, transformInfo(ref.symbol, ref.symbol.info)) - } private val eraser = new Erasure.Typer(this) - def run(using Context): Unit = { + def run(using Context): Unit = val unit = ctx.compilationUnit unit.tpdTree = eraser.typedExpr(unit.tpdTree)(using ctx.fresh.setTyper(eraser).setPhase(this.next)) - } /** erased classes get erased to empty traits with Object as parent and an empty constructor */ private def erasedClassInfo(cls: ClassSymbol)(using Context) = @@ -150,9 +144,9 @@ class Erasure extends Phase with DenotTransformer { declaredParents = defn.ObjectClass.typeRef :: Nil, decls = newScopeWith(newConstructor(cls, Flags.EmptyFlags, Nil, Nil))) - override def checkPostCondition(tree: tpd.Tree)(using Context): Unit = { + override def checkPostCondition(tree: tpd.Tree)(using Context): Unit = assertErased(tree) - tree match { + tree match case _: tpd.Import => assert(false, i"illegal tree: $tree") case res: tpd.This => assert(!ExplicitOuter.referencesOuter(ctx.owner.lexicallyEnclosingClass, res), @@ -165,29 +159,25 @@ class Erasure extends Phase with DenotTransformer { assert(ret.expr.tpe <:< rType, i"Returned value:${ret.expr} does not conform to result type(${ret.expr.tpe.widen} of method $from") case _ => - } - } /** Assert that tree type and its widened underlying type are erased. * Also assert that term refs have fixed symbols (so we are sure * they need not be reloaded using member; this would likely fail as signatures * may change after erasure). 
*/ - def assertErased(tree: tpd.Tree)(using Context): Unit = { + def assertErased(tree: tpd.Tree)(using Context): Unit = assertErased(tree.typeOpt, tree) if (!defn.isPolymorphicAfterErasure(tree.symbol)) assertErased(tree.typeOpt.widen, tree) if (ctx.mode.isExpr) - tree.tpe match { + tree.tpe match case ref: TermRef => assert(ref.denot.isInstanceOf[SymDenotation] || ref.denot.isInstanceOf[UniqueRefDenotation], i"non-sym type $ref of class ${ref.getClass} with denot of class ${ref.denot.getClass} of $tree") case _ => - } - } - def assertErased(tp: Type, tree: tpd.Tree = tpd.EmptyTree)(using Context): Unit = { + def assertErased(tp: Type, tree: tpd.Tree = tpd.EmptyTree)(using Context): Unit = def isAllowed(cls: Symbol, sourceName: String) = tp.typeSymbol == cls && ctx.compilationUnit.source.file.name == sourceName assert( @@ -198,10 +188,8 @@ class Erasure extends Phase with DenotTransformer { || isAllowed(defn.PairClass, "Tuple.scala") || isAllowed(defn.PureClass, "Pure.scala"), i"The type $tp - ${tp.toString} of class ${tp.getClass} of tree $tree : ${tree.tpe} / ${tree.getClass} is illegal after erasure, phase = ${ctx.phase.prev}") - } -} -object Erasure { +object Erasure: import tpd._ import TypeTestsCasts._ @@ -258,39 +246,34 @@ object Erasure { * This is important for specialization: calls to the super constructor should not box/unbox specialized * fields (see TupleX). 
(ID) */ - private def safelyRemovableUnboxArg(tree: Tree)(using Context): Tree = tree match { + private def safelyRemovableUnboxArg(tree: Tree)(using Context): Tree = tree match case Apply(fn, arg :: Nil) if isUnbox(fn.symbol) && defn.ScalaBoxedClasses().contains(arg.tpe.typeSymbol) => arg case _ => EmptyTree - } def constant(tree: Tree, const: Tree)(using Context): Tree = (if (isPureExpr(tree)) const else Block(tree :: Nil, const)).withSpan(tree.span) - final def box(tree: Tree, target: => String = "")(using Context): Tree = trace(i"boxing ${tree.showSummary()}: ${tree.tpe} into $target") { - tree.tpe.widen match { + final def box(tree: Tree, target: => String = "")(using Context): Tree = trace(i"boxing ${tree.showSummary()}: ${tree.tpe} into $target"): + tree.tpe.widen match case ErasedValueType(tycon, _) => New(tycon, cast(tree, underlyingOfValueClass(tycon.symbol.asClass)) :: Nil) // todo: use adaptToType? case tp => val cls = tp.classSymbol if (cls eq defn.UnitClass) constant(tree, ref(defn.BoxedUnit_UNIT)) else if (cls eq defn.NothingClass) tree // a non-terminating expression doesn't need boxing - else { + else assert(cls ne defn.ArrayClass) val arg = safelyRemovableUnboxArg(tree) if (arg.isEmpty) ref(boxMethod(cls.asClass)).appliedTo(tree) - else { + else report.log(s"boxing an unbox: ${tree.symbol} -> ${arg.tpe}") arg - } - } - } - } - def unbox(tree: Tree, pt: Type)(using Context): Tree = trace(i"unboxing ${tree.showSummary()}: ${tree.tpe} as a $pt") { - pt match { + def unbox(tree: Tree, pt: Type)(using Context): Tree = trace(i"unboxing ${tree.showSummary()}: ${tree.tpe} as a $pt"): + pt match case ErasedValueType(tycon, underlying) => def unboxedTree(t: Tree) = adaptToType(t, tycon) @@ -303,7 +286,7 @@ object Erasure { val tree1 = if (tree.tpe isRef defn.NullClass) adaptToType(tree, underlying) - else if (!(tree.tpe <:< tycon)) { + else if (!(tree.tpe <:< tycon)) assert(!(tree.tpe.typeSymbol.isPrimitiveValueClass)) val nullTree = nullLiteral val 
unboxedNull = adaptToType(nullTree, underlying) @@ -313,26 +296,22 @@ object Erasure { unboxedNull, unboxedTree(t)) } - } else unboxedTree(tree) cast(tree1, pt) case _ => val cls = pt.classSymbol if (cls eq defn.UnitClass) constant(tree, Literal(Constant(()))) - else { + else assert(cls ne defn.ArrayClass) ref(unboxMethod(cls.asClass)).appliedTo(tree) - } - } - } /** Generate a synthetic cast operation from tree.tpe to pt. * Does not do any boxing/unboxing (this is handled upstream). * Casts from and to ErasedValueType are special, see the explanation * in ExtensionMethods#transform. */ - def cast(tree: Tree, pt: Type)(using Context): Tree = trace(i"cast ${tree.tpe.widen} --> $pt", show = true) { + def cast(tree: Tree, pt: Type)(using Context): Tree = trace(i"cast ${tree.tpe.widen} --> $pt", show = true): def wrap(tycon: TypeRef) = ref(u2evt(tycon.typeSymbol.asClass)).appliedTo(tree) def unwrap(tycon: TypeRef) = @@ -340,7 +319,7 @@ object Erasure { assert(!pt.isInstanceOf[SingletonType], pt) if (pt isRef defn.UnitClass) unbox(tree, pt) - else (tree.tpe.widen, pt) match { + else (tree.tpe.widen, pt) match // Convert primitive arrays into reference arrays, this path is only // needed to handle repeated arguments, see // `Definitions#FromJavaObjectSymbol` and `ElimRepeated#adaptToArray`. 
@@ -354,11 +333,10 @@ object Erasure { if (tp1 <:< underlying2) // Cast EVT(tycon1, underlying1) to EVT(tycon2, EVT(tycon1, underlying1)) wrap(tycon2) - else { + else assert(underlying1 <:< tp2, i"Non-sensical cast between unrelated types $tp1 and $tp2") // Cast EVT(tycon1, EVT(tycon2, underlying2)) to EVT(tycon2, underlying2) unwrap(tycon1) - } // When only one type is an EVT then we already know that the other one is the underlying case (_, ErasedValueType(tycon2, _)) => @@ -371,8 +349,6 @@ object Erasure { primitiveConversion(tree, pt.classSymbol) else tree.asInstance(pt) - } - } /** Adaptation of an expression `e` to an expected type `PT`, applying the following * rewritings exhaustively as long as the type of `e` is not a subtype of `PT`. @@ -387,25 +363,25 @@ object Erasure { def adaptToType(tree: Tree, pt: Type)(using Context): Tree = pt match case _: FunProto | AnyFunctionProto => tree case _ => tree.tpe.widen match - case mt: MethodType if tree.isTerm => - assert(mt.paramInfos.isEmpty, i"bad adapt for $tree: $mt") - adaptToType(tree.appliedToNone, pt) - case tpw => - if (pt.isInstanceOf[ProtoType] || tree.tpe <:< pt) - tree - else if (tpw.isErasedValueType) - if (pt.isErasedValueType) then - tree.asInstance(pt) - else + case mt: MethodType if tree.isTerm => + assert(mt.paramInfos.isEmpty, i"bad adapt for $tree: $mt") + adaptToType(tree.appliedToNone, pt) + case tpw => + if (pt.isInstanceOf[ProtoType] || tree.tpe <:< pt) + tree + else if (tpw.isErasedValueType) + if (pt.isErasedValueType) then + tree.asInstance(pt) + else + adaptToType(box(tree), pt) + else if (pt.isErasedValueType) + adaptToType(unbox(tree, pt), pt) + else if (tpw.isPrimitiveValueType && !pt.isPrimitiveValueType) adaptToType(box(tree), pt) - else if (pt.isErasedValueType) - adaptToType(unbox(tree, pt), pt) - else if (tpw.isPrimitiveValueType && !pt.isPrimitiveValueType) - adaptToType(box(tree), pt) - else if (pt.isPrimitiveValueType && !tpw.isPrimitiveValueType) - adaptToType(unbox(tree, 
pt), pt) - else - cast(tree, pt) + else if (pt.isPrimitiveValueType && !tpw.isPrimitiveValueType) + adaptToType(unbox(tree, pt), pt) + else + cast(tree, pt) end adaptToType /** The following code: @@ -534,20 +510,19 @@ object Erasure { val rhs = Apply(meth, bridgeParams.lazyZip(implParamTypes).map(ctx.typer.adapt(_, _))) ctx.typer.adapt(rhs, bridgeType.resultType) }, - targetType = functionalInterface).withSpan(tree.span) + targetType = functionalInterface).withSpan(tree.span) else tree end adaptClosure end Boxing - class Typer(erasurePhase: DenotTransformer) extends typer.ReTyper with NoChecking { + class Typer(erasurePhase: DenotTransformer) extends typer.ReTyper with NoChecking: import Boxing._ - def isErased(tree: Tree)(using Context): Boolean = tree match { + def isErased(tree: Tree)(using Context): Boolean = tree match case TypeApply(Select(qual, _), _) if tree.symbol == defn.Any_typeCast => isErased(qual) case _ => tree.symbol.isEffectivelyErased - } /** Check that Java statics and packages can only be used in selections. */ @@ -601,17 +576,15 @@ object Erasure { if sym.owner.isClass then sym.dropAfter(erasurePhase) tpd.EmptyTree - def erasedType(tree: untpd.Tree)(using Context): Type = { + def erasedType(tree: untpd.Tree)(using Context): Type = val tp = tree.typeOpt if (tree.isTerm) erasedRef(tp) else valueErasure(tp) - } - override def promote(tree: untpd.Tree)(using Context): tree.ThisTree[Type] = { + override def promote(tree: untpd.Tree)(using Context): tree.ThisTree[Type] = assert(tree.hasType) val erasedTp = erasedType(tree) report.log(s"promoting ${tree.show}: ${erasedTp.showWithUnderlying()}") tree.withType(erasedTp) - } /** When erasing most TypeTrees we should not semi-erase value types. * This is not the case for [[DefDef#tpt]], [[ValDef#tpt]] and [[Typed#tpt]], they @@ -664,7 +637,7 @@ object Erasure { * e.clone -> e.clone' where clone' is Object's clone method * e.m -> e.[]m if `m` is an array operation other than `clone`. 
*/ - override def typedSelect(tree: untpd.Select, pt: Type)(using Context): Tree = { + override def typedSelect(tree: untpd.Select, pt: Type)(using Context): Tree = if tree.name == nme.apply && integrateSelect(tree) then return typed(tree.qualifier, pt) @@ -677,7 +650,7 @@ object Erasure { // We cannot simply call `erasure` on the qualifier because its erasure might be // `Object` due to how we erase intersections (see pos/i13950.scala). // Instead, we manually lookup the type of `apply` in the qualifier. - inContext(preErasureCtx) { + inContext(preErasureCtx): val qualTp = tree.qualifier.typeOpt.widen if qualTp.derivesFrom(defn.PolyFunctionClass) then erasePolyFunctionApply(qualTp.select(nme.apply).widen).classSymbol @@ -685,7 +658,6 @@ object Erasure { eraseErasedFunctionApply(qualTp.select(nme.apply).widen.asInstanceOf[MethodType]).classSymbol else NoSymbol - } else val owner = sym.maybeOwner if defn.specialErasure.contains(owner) then @@ -704,7 +676,7 @@ object Erasure { val owner = mapOwner(origSym) val sym = (if (owner eq origSym.maybeOwner) origSym else owner.info.decl(tree.name).symbol) - .orElse { + .orElse: // We fail the sym.exists test for pos/i15158.scala, where we pass an infinitely // recurring match type to an overloaded constructor. An equivalent test // with regular apply methods succeeds. It's at present unclear whether @@ -715,7 +687,6 @@ object Erasure { // trigger the assert(sym.exists, ...) below. 
val prevSym = tree.symbol(using preErasureCtx) if prevSym.isConstructor then prevSym else NoSymbol - } assert(sym.exists, i"no owner from $owner/${origSym.showLocated} in $tree") @@ -732,7 +703,7 @@ object Erasure { else assignType(untpd.cpy.Select(tree)(qual, tree.name.primitiveArrayOp), qual) - def adaptIfSuper(qual: Tree): Tree = qual match { + def adaptIfSuper(qual: Tree): Tree = qual match case Super(thisQual, untpd.EmptyTypeIdent) => val SuperType(thisType, supType) = qual.tpe: @unchecked if (sym.owner.is(Flags.Trait)) @@ -742,7 +713,6 @@ object Erasure { qual.withType(SuperType(thisType, thisType.firstParent.typeConstructor)) case _ => qual - } /** Can we safely use `cls` as a qualifier without getting a runtime error on * the JVM due to its accessibility checks? @@ -751,15 +721,14 @@ object Erasure { // Scala classes are always emitted as public, unless the // `private` modifier is used, but a non-private class can never // extend a private class, so such a class will never be a cast target. - !cls.is(Flags.JavaDefined) || { + !cls.is(Flags.JavaDefined) `||`: // We can't rely on `isContainedWith` here because packages are // not nested from the JVM point of view. 
val boundary = cls.accessBoundary(cls.owner)(using preErasureCtx) (boundary eq defn.RootClass) || (ctx.owner.enclosingPackageClass eq boundary) - } - def recur(qual: Tree): Tree = { + def recur(qual: Tree): Tree = val qualIsPrimitive = qual.tpe.widen.isPrimitiveValueType val symIsPrimitive = sym.owner.isPrimitiveValueClass @@ -772,7 +741,7 @@ object Erasure { recur(unbox(qual, sym.owner.typeRef)) else if (sym.owner eq defn.ArrayClass) selectArrayMember(qual, originalQual) - else { + else val qual1 = adaptIfSuper(qual) if (qual1.tpe.derivesFrom(sym.owner) || qual1.isInstanceOf[Super]) select(qual1, sym) @@ -791,38 +760,31 @@ object Erasure { em"Unable to emit reference to ${sym.showLocated}, ${sym.owner} is not accessible in ${ctx.owner.enclosingClass}") tp recur(cast(qual1, castTarget)) - } - } checkNotErased(recur(qual1)) - } override def typedThis(tree: untpd.This)(using Context): Tree = if (tree.symbol == ctx.owner.lexicallyEnclosingClass || tree.symbol.isStaticOwner) promote(tree) - else { + else report.log(i"computing outer path from ${ctx.owner.ownersIterator.toList}%, % to ${tree.symbol}, encl class = ${ctx.owner.enclosingClass}") outer.path(toCls = tree.symbol) - } - override def typedTypeApply(tree: untpd.TypeApply, pt: Type)(using Context): Tree = { + override def typedTypeApply(tree: untpd.TypeApply, pt: Type)(using Context): Tree = val ntree = atPhase(erasurePhase){ // Use erased-type semantic to intercept TypeApply in explicit nulls val interceptCtx = if ctx.explicitNulls then ctx.retractMode(Mode.SafeNulls) else ctx interceptTypeApply(tree.asInstanceOf[TypeApply])(using interceptCtx) }.withSpan(tree.span) - ntree match { + ntree match case TypeApply(fun, args) => val fun1 = typedExpr(fun, AnyFunctionProto) - fun1.tpe.widen match { + fun1.tpe.widen match case funTpe: PolyType => val args1 = args.mapconserve(typedType(_)) untpd.cpy.TypeApply(tree)(fun1, args1).withType(funTpe.instantiate(args1.tpes)) case _ => fun1 - } case _ => typedExpr(ntree, pt) - } 
- } /** Besides normal typing, this method does uncurrying and collects parameters * to anonymous functions of arity > 22. @@ -896,9 +858,8 @@ object Erasure { else valueErasure(tree.typeOpt) override def typedInlined(tree: untpd.Inlined, pt: Type)(using Context): Tree = - super.typedInlined(tree, pt) match { + super.typedInlined(tree, pt) match case tree: Inlined => Inlines.dropInlined(tree) - } override def typedValDef(vdef: untpd.ValDef, sym: Symbol)(using Context): Tree = if (sym.isEffectivelyErased) erasedDef(sym) @@ -941,10 +902,9 @@ object Erasure { def selector(n: Int) = ref(bunchedParam) .select(defn.Array_apply) .appliedTo(Literal(Constant(n))) - val paramDefs = vparams.zipWithIndex.map { + val paramDefs = vparams.zipWithIndex.map: case (paramDef, idx) => assignType(untpd.cpy.ValDef(paramDef)(rhs = selector(idx)), paramDef.symbol) - } vparams = ValDef(bunchedParam) :: Nil rhs1 = Block(paramDefs, rhs1) @@ -1003,14 +963,13 @@ object Erasure { if retainer.hasAnnotation(defn.TargetNameAnnot) then retainer.targetName.unmangle(BodyRetainerName).exclude(BodyRetainerName) else origName - val inlineMeth = atPhase(typerPhase) { + val inlineMeth = atPhase(typerPhase): retainer.owner.info.decl(origName) .matchingDenotation(retainer.owner.thisType, stat.symbol.info, targetName) .symbol - } (inlineMeth, stat) }.toMap - stats.mapConserve { + stats.mapConserve: case stat: DefDef @unchecked if stat.symbol.isRetainedInlineMethod => val rdef = retainerDef(stat.symbol) val fromParams = untpd.allParamSyms(rdef) @@ -1023,14 +982,12 @@ object Erasure { substTo = toParams) cpy.DefDef(stat)(rhs = mapBody.transform(rdef.rhs)) case stat => stat - } - override def typedClosure(tree: untpd.Closure, pt: Type)(using Context): Tree = { + override def typedClosure(tree: untpd.Closure, pt: Type)(using Context): Tree = val xxl = defn.isXXLFunctionClass(tree.typeOpt.typeSymbol) var implClosure = super.typedClosure(tree, pt).asInstanceOf[Closure] if (xxl) implClosure = 
cpy.Closure(implClosure)(tpt = TypeTree(defn.FunctionXXLClass.typeRef)) adaptClosure(implClosure) - } override def typedNew(tree: untpd.New, pt: Type)(using Context): Tree = checkNotErasedClass(super.typedNew(tree, pt)) @@ -1045,7 +1002,7 @@ object Erasure { override def typedAnnotated(tree: untpd.Annotated, pt: Type)(using Context): Tree = typed(tree.arg, pt) - override def typedStats(stats: List[untpd.Tree], exprOwner: Symbol)(using Context): (List[Tree], Context) = { + override def typedStats(stats: List[untpd.Tree], exprOwner: Symbol)(using Context): (List[Tree], Context) = // discard Imports first, since Bridges will use tree's symbol val stats0 = addRetainedInlineBodies(stats.filter(!_.isInstanceOf[untpd.Import]))(using preErasureCtx) val stats1 = @@ -1053,7 +1010,6 @@ object Erasure { else stats0 val (stats2, finalCtx) = super.typedStats(stats1, exprOwner) (stats2.filterConserve(!_.isEmpty), finalCtx) - } /** Finally drops all (language-) imports in erasure. * Since some of the language imports change the subtyping, @@ -1062,7 +1018,7 @@ object Erasure { override def typedImport(tree: untpd.Import)(using Context) = EmptyTree override def adapt(tree: Tree, pt: Type, locked: TypeVars)(using Context): Tree = - trace(i"adapting ${tree.showSummary()}: ${tree.tpe} to $pt", show = true) { + trace(i"adapting ${tree.showSummary()}: ${tree.tpe} to $pt", show = true): if ctx.phase != erasurePhase && ctx.phase != erasurePhase.next then // this can happen when reading annotations loaded during erasure, // since these are loaded at phase typer. 
@@ -1070,11 +1026,8 @@ object Erasure { else if (tree.isEmpty) tree else if (ctx.mode is Mode.Pattern) tree // TODO: replace with assertion once pattern matcher is active else adaptToType(tree, pt) - } override def simplify(tree: Tree, pt: Type, locked: TypeVars)(using Context): tree.type = tree - } private def takesBridges(sym: Symbol)(using Context): Boolean = sym.isClass && !sym.isOneOf(Flags.Trait | Flags.Package) -} diff --git a/compiler/src/dotty/tools/dotc/transform/EtaReduce.scala b/compiler/src/dotty/tools/dotc/transform/EtaReduce.scala index cf62cffd4cdb..d60bee49a1b7 100644 --- a/compiler/src/dotty/tools/dotc/transform/EtaReduce.scala +++ b/compiler/src/dotty/tools/dotc/transform/EtaReduce.scala @@ -41,7 +41,7 @@ class EtaReduce extends MiniPhase: if (name == nme.apply || defn.FunctionSpecializedApplyNames.contains(name)) && mdef.paramss.head.corresponds(args)((param, arg) => arg.isInstanceOf[Ident] && arg.symbol == param.symbol) - && isPurePath(fn) + && isPurePath(fn) && fn.tpe <:< tree.tpe && defn.isFunctionClass(fn.tpe.widen.typeSymbol) => report.log(i"eta reducing $tree --> $fn") diff --git a/compiler/src/dotty/tools/dotc/transform/ExpandPrivate.scala b/compiler/src/dotty/tools/dotc/transform/ExpandPrivate.scala index 41e5b76ca874..1fca3fdfda46 100644 --- a/compiler/src/dotty/tools/dotc/transform/ExpandPrivate.scala +++ b/compiler/src/dotty/tools/dotc/transform/ExpandPrivate.scala @@ -41,7 +41,7 @@ class ExpandPrivate extends MiniPhase with IdentityDenotTransformer { thisPhase override def changesMembers: Boolean = true // the phase introduces new members with mangled names override def checkPostCondition(tree: Tree)(using Context): Unit = - tree match { + tree match case t: DefDef => val sym = t.symbol def hasWeakerAccess(other: Symbol) = @@ -53,7 +53,6 @@ class ExpandPrivate extends MiniPhase with IdentityDenotTransformer { thisPhase if (fail.exists) assert(false, i"${sym.showFullName}: ${sym.info} has weaker access than superclass method 
${fail.showFullName}: ${fail.info}") case _ => - } private def isVCPrivateParamAccessor(d: SymDenotation)(using Context) = d.isTerm && d.isAllOf(PrivateParamAccessor) && isDerivedValueClass(d.owner) @@ -66,51 +65,44 @@ class ExpandPrivate extends MiniPhase with IdentityDenotTransformer { thisPhase private def ensurePrivateAccessible(d: SymDenotation)(using Context) = if (isVCPrivateParamAccessor(d)) d.ensureNotPrivate.installAfter(thisPhase) - else if (d.is(PrivateTerm) && !d.owner.is(Package) && d.owner != ctx.owner.lexicallyEnclosingClass) { + else if (d.is(PrivateTerm) && !d.owner.is(Package) && d.owner != ctx.owner.lexicallyEnclosingClass) // Paths `p1` and `p2` are similar if they have a common suffix that follows // possibly different directory paths. That is, their common suffix extends // in both cases either to the start of the path or to a file separator character. // TODO: should we test absolute paths instead? - def isSimilar(p1: String, p2: String): Boolean = { + def isSimilar(p1: String, p2: String): Boolean = var i = p1.length - 1 var j = p2.length - 1 - while (i >= 0 && j >= 0 && p1(i) == p2(j) && p1(i) != separatorChar) { + while (i >= 0 && j >= 0 && p1(i) == p2(j) && p1(i) != separatorChar) i -= 1 j -= 1 - } (i < 0 || p1(i) == separatorChar) && (j < 0 || p2(j) == separatorChar) - } assert(d.symbol.source.exists && ctx.owner.source.exists && isSimilar(d.symbol.source.path, ctx.owner.source.path), s"private ${d.symbol.showLocated} in ${d.symbol.source} accessed from ${ctx.owner.showLocated} in ${ctx.owner.source}") d.ensureNotPrivate.installAfter(thisPhase) - } - override def transformIdent(tree: Ident)(using Context): Ident = { + override def transformIdent(tree: Ident)(using Context): Ident = ensurePrivateAccessible(tree.symbol) tree - } - override def transformSelect(tree: Select)(using Context): Select = { + override def transformSelect(tree: Select)(using Context): Select = ensurePrivateAccessible(tree.symbol) tree - } - override def 
transformDefDef(tree: DefDef)(using Context): DefDef = { + override def transformDefDef(tree: DefDef)(using Context): DefDef = val sym = tree.symbol - tree.rhs match { + tree.rhs match case Apply(sel @ Select(_: Super, _), _) if sym.isAllOf(PrivateParamAccessor) && sel.symbol.is(ParamAccessor) && sym.name == sel.symbol.name => sym.ensureNotPrivate.installAfter(thisPhase) case _ => if (isVCPrivateParamAccessor(sym)) sym.ensureNotPrivate.installAfter(thisPhase) - } tree - } } object ExpandPrivate: diff --git a/compiler/src/dotty/tools/dotc/transform/ExpandSAMs.scala b/compiler/src/dotty/tools/dotc/transform/ExpandSAMs.scala index 0bfc444e0997..b7800cefa5f7 100644 --- a/compiler/src/dotty/tools/dotc/transform/ExpandSAMs.scala +++ b/compiler/src/dotty/tools/dotc/transform/ExpandSAMs.scala @@ -43,9 +43,9 @@ class ExpandSAMs extends MiniPhase: override def description: String = ExpandSAMs.description - override def transformBlock(tree: Block)(using Context): Tree = tree match { + override def transformBlock(tree: Block)(using Context): Tree = tree match case Block(stats @ (fn: DefDef) :: Nil, Closure(_, fnRef, tpt)) if fnRef.symbol == fn.symbol => - tpt.tpe match { + tpt.tpe match case NoType => tree // it's a plain function case tpe if defn.isContextFunctionType(tpe) => @@ -61,10 +61,8 @@ class ExpandSAMs extends MiniPhase: val Seq(samDenot) = tpe1.possibleSamMethods cpy.Block(tree)(stats, AnonClass(tpe1 :: Nil, fn.symbol.asTerm :: Nil, samDenot.symbol.asTerm.name :: Nil)) - } case _ => tree - } private def checkNoContextFunction(tpt: Tree)(using Context): Unit = if defn.isContextFunctionType(tpt.tpe) then @@ -112,7 +110,7 @@ class ExpandSAMs extends MiniPhase: * } * ``` */ - private def toPartialFunction(tree: Block, tpe: Type)(using Context): Tree = { + private def toPartialFunction(tree: Block, tpe: Type)(using Context): Tree = val closureDef(anon @ DefDef(_, List(List(param)), _, _)) = tree: @unchecked checkNoContextFunction(anon.tpt) @@ -143,7 +141,7 @@ class 
ExpandSAMs extends MiniPhase: val isDefinedAtFn = overrideSym(defn.PartialFunction_isDefinedAt) val applyOrElseFn = overrideSym(defn.PartialFunction_applyOrElse) - def translateMatch(tree: Match, pfParam: Symbol, cases: List[CaseDef], defaultValue: Tree)(using Context) = { + def translateMatch(tree: Match, pfParam: Symbol, cases: List[CaseDef], defaultValue: Tree)(using Context) = val selector = tree.selector val cases1 = if cases.exists(isDefaultCase) then cases else @@ -156,37 +154,32 @@ class ExpandSAMs extends MiniPhase: // Needed because a partial function can be written as: // param => param match { case "foo" if foo(param) => param } // And we need to update all references to 'param' - } - def isDefinedAtRhs(paramRefss: List[List[Tree]])(using Context) = { + def isDefinedAtRhs(paramRefss: List[List[Tree]])(using Context) = val tru = Literal(Constant(true)) def translateCase(cdef: CaseDef) = cpy.CaseDef(cdef)(body = tru).changeOwner(anonSym, isDefinedAtFn) val paramRef = paramRefss.head.head val defaultValue = Literal(Constant(false)) translateMatch(pfRHS, paramRef.symbol, pfRHS.cases.map(translateCase), defaultValue) - } - def applyOrElseRhs(paramRefss: List[List[Tree]])(using Context) = { + def applyOrElseRhs(paramRefss: List[List[Tree]])(using Context) = val List(paramRef, defaultRef) = paramRefss(1) def translateCase(cdef: CaseDef) = cdef.changeOwner(anonSym, applyOrElseFn) val defaultValue = defaultRef.select(nme.apply).appliedTo(paramRef) translateMatch(pfRHS, paramRef.symbol, pfRHS.cases.map(translateCase), defaultValue) - } val isDefinedAtDef = transformFollowingDeep(DefDef(isDefinedAtFn, isDefinedAtRhs(_)(using ctx.withOwner(isDefinedAtFn)))) val applyOrElseDef = transformFollowingDeep(DefDef(applyOrElseFn, applyOrElseRhs(_)(using ctx.withOwner(applyOrElseFn)))) List(isDefinedAtDef, applyOrElseDef) } - } - private def checkRefinements(tpe: Type, tree: Tree)(using Context): Type = tpe.dealias match { + private def checkRefinements(tpe: Type, tree: 
Tree)(using Context): Type = tpe.dealias match case RefinedType(parent, name, _) => if (name.isTermName && tpe.member(name).symbol.ownersIterator.isEmpty) // if member defined in the refinement report.error(em"Lambda does not define $name", tree.srcPos) checkRefinements(parent, tree) case tpe => tpe - } end ExpandSAMs diff --git a/compiler/src/dotty/tools/dotc/transform/ExplicitOuter.scala b/compiler/src/dotty/tools/dotc/transform/ExplicitOuter.scala index deb1f665c022..21935ab5e575 100644 --- a/compiler/src/dotty/tools/dotc/transform/ExplicitOuter.scala +++ b/compiler/src/dotty/tools/dotc/transform/ExplicitOuter.scala @@ -48,14 +48,13 @@ class ExplicitOuter extends MiniPhase with InfoTransformer { thisPhase => override def changesMembers: Boolean = true // the phase adds outer accessors /** Add outer accessors if a class always needs an outer pointer */ - override def transformInfo(tp: Type, sym: Symbol)(using Context): Type = tp match { + override def transformInfo(tp: Type, sym: Symbol)(using Context): Type = tp match case tp @ ClassInfo(_, cls, _, decls, _) if needsOuterAlways(cls) => val newDecls = decls.cloneScope newOuterAccessors(cls).foreach(newDecls.enter) tp.derivedClassInfo(decls = newDecls) case _ => tp - } override def infoMayChange(sym: Symbol)(using Context): Boolean = sym.isClass && !sym.is(JavaDefined) @@ -70,31 +69,29 @@ class ExplicitOuter extends MiniPhase with InfoTransformer { thisPhase => * a separate phase which needs to run after erasure. However, we make sure here * that the super class constructor is indeed a New, and not just a type. 
*/ - override def transformTemplate(impl: Template)(using Context): Tree = { + override def transformTemplate(impl: Template)(using Context): Tree = val cls = ctx.owner.asClass val isTrait = cls.is(Trait) if needsOuterIfReferenced(cls) && !needsOuterAlways(cls) && referencesOuter(cls, impl) then ensureOuterAccessors(cls) val clsHasOuter = hasOuter(cls) - if (clsHasOuter || cls.mixins.exists(needsOuterIfReferenced)) { + if (clsHasOuter || cls.mixins.exists(needsOuterIfReferenced)) val newDefs = new mutable.ListBuffer[Tree] if (clsHasOuter) if (isTrait) newDefs += DefDef(outerAccessor(cls).asTerm, EmptyTree) - else { + else val outerParamAcc = outerParamAccessor(cls) newDefs += ValDef(outerParamAcc, EmptyTree) newDefs += DefDef(outerAccessor(cls).asTerm, ref(outerParamAcc)) - } for (parentTrait <- cls.mixins) - if (needsOuterIfReferenced(parentTrait)) { + if (needsOuterIfReferenced(parentTrait)) val parentTp = cls.denot.thisType.baseType(parentTrait) val outerAccImpl = newOuterAccessor(cls, parentTrait).enteredAfter(thisPhase) newDefs += DefDef(outerAccImpl, singleton(fixThis(outerPrefix(parentTp)))) - } val parents1 = for (parent <- impl.parents) yield @@ -107,21 +104,17 @@ class ExplicitOuter extends MiniPhase with InfoTransformer { thisPhase => New(parent.tpe, Nil).withSpan(impl.span) case _ => parent cpy.Template(impl)(parents = parents1, body = impl.body ++ newDefs) - } else impl - } - override def transformClosure(tree: Closure)(using Context): tpd.Tree = { - if (tree.tpt ne EmptyTree) { + override def transformClosure(tree: Closure)(using Context): tpd.Tree = + if (tree.tpt ne EmptyTree) val cls = tree.tpt.asInstanceOf[TypeTree].tpe.classSymbol if (cls.exists && hasOuter(cls.asClass)) report.error("Not a single abstract method type, requires an outer pointer", tree.srcPos) - } tree - } } -object ExplicitOuter { +object ExplicitOuter: import ast.tpd._ val name: String = "explicitOuter" @@ -129,10 +122,9 @@ object ExplicitOuter { /** Ensure that class `cls` has 
outer accessors */ def ensureOuterAccessors(cls: ClassSymbol)(using Context): Unit = - atPhase(explicitOuterPhase.next) { + atPhase(explicitOuterPhase.next): if (!hasOuter(cls)) newOuterAccessors(cls).foreach(_.enteredAfter(explicitOuterPhase.asInstanceOf[DenotTransformer])) - } /** The outer accessor and potentially outer param accessor needed for class `cls` */ private def newOuterAccessors(cls: ClassSymbol)(using Context) = @@ -141,16 +133,14 @@ object ExplicitOuter { /** Scala 2.x and Dotty don't always agree on what should be the type of the outer parameter, * so we replicate the old behavior when passing arguments to methods coming from Scala 2.x. */ - private def outerClass(cls: ClassSymbol)(using Context): Symbol = { + private def outerClass(cls: ClassSymbol)(using Context): Symbol = val encl = cls.owner.enclosingClass if (cls.is(Scala2x)) - encl.asClass.classInfo.selfInfo match { + encl.asClass.classInfo.selfInfo match case tp: TypeRef => tp.classSymbol case self: Symbol => self case _ => encl - } else encl - } /** A new outer accessor or param accessor. * @param owner The class where the outer accessor is located @@ -165,7 +155,7 @@ object ExplicitOuter { * base type of P.this wrt class O * - otherwise O[?, ..., ?] */ - private def newOuterSym(owner: ClassSymbol, cls: ClassSymbol, name: TermName, flags: FlagSet)(using Context) = { + private def newOuterSym(owner: ClassSymbol, cls: ClassSymbol, name: TermName, flags: FlagSet)(using Context) = val outerThis = owner.owner.enclosingClass.thisType val outerCls = outerClass(cls) val prefix = owner.thisType.baseType(cls).normalizedPrefix @@ -178,22 +168,19 @@ object ExplicitOuter { else prefix.widen) val info = if (flags.is(Method)) ExprType(target) else target val currentNestingLevel = ctx.nestingLevel - atPhaseNoEarlier(explicitOuterPhase.next) { // outer accessors are entered at explicitOuter + 1, should not be defined before. 
+ atPhaseNoEarlier(explicitOuterPhase.next): // outer accessors are entered at explicitOuter + 1, should not be defined before. newSymbol(owner, name, SyntheticArtifact | flags, info, coord = cls.coord, nestingLevel = currentNestingLevel) - } - } /** A new param accessor for the outer field in class `cls` */ private def newOuterParamAccessor(cls: ClassSymbol)(using Context) = newOuterSym(cls, cls, nme.OUTER, LocalParamAccessor) /** A new outer accessor for class `cls` which is a member of `owner` */ - private def newOuterAccessor(owner: ClassSymbol, cls: ClassSymbol)(using Context) = { + private def newOuterAccessor(owner: ClassSymbol, cls: ClassSymbol)(using Context) = val deferredIfTrait = if (owner.is(Trait)) Deferred else EmptyFlags val outerAccIfOwn = if (owner == cls) OuterAccessor else EmptyFlags newOuterSym(owner, cls, outerAccName(cls), Final | StableMethod | outerAccIfOwn | deferredIfTrait) - } private def outerAccName(cls: ClassSymbol)(using Context): TermName = nme.OUTER.expandedName(cls) @@ -209,7 +196,7 @@ object ExplicitOuter { !(cls.isStatic || outerOwner(cls).isStaticOwner || cls.is(PureInterface) - ) + ) /** Class unconditionally needs an outer pointer. 
This is the case if * the class needs an outer pointer if referenced and one of the following holds: @@ -219,8 +206,8 @@ object ExplicitOuter { private def needsOuterAlways(cls: ClassSymbol)(using Context): Boolean = needsOuterIfReferenced(cls) && (!hasLocalInstantiation(cls) || // needs outer because we might not know whether outer is referenced or not - cls.mixins.exists(needsOuterIfReferenced) || // needs outer for parent traits - cls.info.parents.exists(parent => // needs outer to potentially pass along to parent + cls.mixins.exists(needsOuterIfReferenced) || // needs outer for parent traits + cls.info.parents.exists(parent => // needs outer to potentially pass along to parent needsOuterIfReferenced(parent.classSymbol.asClass))) /** Class is only instantiated in the compilation unit where it is defined */ @@ -344,9 +331,9 @@ object ExplicitOuter { private final val HoistableFlags = Method | Lazy | Module /** The outer prefix implied by type `tpe` */ - private def outerPrefix(tpe: Type)(using Context): Type = tpe match { + private def outerPrefix(tpe: Type)(using Context): Type = tpe match case tpe: TypeRef => - tpe.symbol match { + tpe.symbol match case cls: ClassSymbol => if (tpe.prefix eq NoPrefix) cls.owner.enclosingClass.thisType else tpe.prefix @@ -359,10 +346,8 @@ object ExplicitOuter { // For some other unknown reason this works with underlying but not with superType. // I was not able to minimize the problem and parboiled2 spits out way too much // macro generated code to be able to pinpoint the root problem. - } case tpe: TypeProxy => outerPrefix(tpe.underlying) - } /** It's possible (i1755.scala gives an example) that the type * given by outerPrefix contains a This-reference to a module outside @@ -374,14 +359,13 @@ object ExplicitOuter { * in the first place. I was not yet able to find out how such references * arise and how to avoid them. 
*/ - private def fixThis(tpe: Type)(using Context): Type = tpe match { + private def fixThis(tpe: Type)(using Context): Type = tpe match case tpe: ThisType if tpe.cls.is(Module) && !ctx.owner.isContainedIn(tpe.cls) => fixThis(tpe.cls.owner.thisType.select(tpe.cls.sourceModule.asTerm)) case tpe: TermRef => tpe.derivedSelect(fixThis(tpe.prefix)) case _ => tpe - } extension (sym: Symbol) def isOuterParamAccessor(using Context): Boolean = sym.is(ParamAccessor) && sym.name == nme.OUTER @@ -405,39 +389,35 @@ object ExplicitOuter { * get erased during erasure. Therefore, outer arguments have to be passed * no later than erasure. */ - class OuterOps(val ictx: Context) extends AnyVal { + class OuterOps(val ictx: Context) extends AnyVal: /** The context of all operations of this class */ given [Dummy]: Context = ictx /** If `cls` has an outer parameter add one to the method type `tp`. */ def addParam(cls: ClassSymbol, tp: Type): Type = - if (needsOuterParam(cls)) { + if (needsOuterParam(cls)) val mt @ MethodTpe(pnames, ptypes, restpe) = tp: @unchecked mt.derivedLambdaType( nme.OUTER :: pnames, outerClass(cls).typeRef :: ptypes, restpe) - } else tp /** If function in an apply node is a constructor that needs to be passed an * outer argument, the singleton list with the argument, otherwise Nil. 
*/ def args(fun: Tree): List[Tree] = - if (fun.symbol.isConstructor) { + if (fun.symbol.isConstructor) val cls = fun.symbol.owner.asClass - def outerArg(receiver: Tree): Tree = receiver match { + def outerArg(receiver: Tree): Tree = receiver match case New(_) | Super(_, _) => singleton(fixThis(outerPrefix(receiver.tpe))) case This(_) => ref(outerParamAccessor(cls)) // will be rewired to outer argument of secondary constructor in phase Constructors case TypeApply(Select(r, nme.asInstanceOf_), args) => outerArg(r) // cast was inserted, skip - } if (needsOuterParam(cls)) - methPart(fun) match { + methPart(fun) match case Select(receiver, _) => outerArg(receiver).withSpan(fun.span) :: Nil - } else Nil - } else Nil /** If the constructors of the given `cls` need to be passed an outer @@ -466,7 +446,7 @@ object ExplicitOuter { if (count == 0 || count < 0 && treeCls == toCls) tree else val enclClass = ctx.owner.lexicallyEnclosingClass.asClass - val outerAcc = atPhaseNoLater(lambdaLiftPhase) { + val outerAcc = atPhaseNoLater(lambdaLiftPhase): // lambdalift mangles local class names, which means we cannot // reliably find outer acessors anymore tree match @@ -474,7 +454,6 @@ object ExplicitOuter { outerParamAccessor(enclClass) case _ => outerAccessor(treeCls.asClass) - } assert(outerAcc.exists, i"failure to construct path from ${ctx.owner.ownersIterator.toList}%/% to `this` of ${toCls.showLocated};\n${treeCls.showLocated} does not have an outer accessor") loop(tree.select(outerAcc).ensureApplied, count - 1) @@ -483,5 +462,3 @@ object ExplicitOuter { loop(start, count) catch case ex: ClassCastException => throw new ClassCastException(i"no path exists from ${ctx.owner.enclosingClass} to $toCls") - } -} diff --git a/compiler/src/dotty/tools/dotc/transform/ExplicitSelf.scala b/compiler/src/dotty/tools/dotc/transform/ExplicitSelf.scala index a6f7a29accd7..402398e0e8d1 100644 --- a/compiler/src/dotty/tools/dotc/transform/ExplicitSelf.scala +++ 
b/compiler/src/dotty/tools/dotc/transform/ExplicitSelf.scala @@ -19,7 +19,7 @@ import SymUtils.* * * Also replaces idents referring to the self type with ThisTypes. */ -class ExplicitSelf extends MiniPhase { +class ExplicitSelf extends MiniPhase: import ast.tpd._ override def phaseName: String = ExplicitSelf.name @@ -38,7 +38,7 @@ class ExplicitSelf extends MiniPhase { report.error(em"self type $selfType of $cls may not be a value class", thiz.srcPos) cpy.Select(tree)(thiz.cast(AndType(selfType, thiz.tpe)), tree.name) - override def transformIdent(tree: Ident)(using Context): Tree = tree.tpe match { + override def transformIdent(tree: Ident)(using Context): Tree = tree.tpe match case tp: ThisType => report.debuglog(s"owner = ${ctx.owner}, context = ${ctx}") This(tp.cls).withSpan(tree.span) @@ -48,16 +48,13 @@ class ExplicitSelf extends MiniPhase { else tree case _ => tree - } - override def transformSelect(tree: Select)(using Context): Tree = tree match { + override def transformSelect(tree: Select)(using Context): Tree = tree match case Select(thiz: This, name) if name.isTermName => val cls = thiz.symbol.asClass if needsCast(tree, cls) then castQualifier(tree, cls, thiz) else tree case _ => tree - } -} object ExplicitSelf: val name: String = "explicitSelf" diff --git a/compiler/src/dotty/tools/dotc/transform/ExtensionMethods.scala b/compiler/src/dotty/tools/dotc/transform/ExtensionMethods.scala index a430f7532066..f3122fb5d221 100644 --- a/compiler/src/dotty/tools/dotc/transform/ExtensionMethods.scala +++ b/compiler/src/dotty/tools/dotc/transform/ExtensionMethods.scala @@ -54,15 +54,15 @@ class ExtensionMethods extends MiniPhase with DenotTransformer with FullParamete override def changesMembers: Boolean = true // the phase adds extension methods - override def transform(ref: SingleDenotation)(using Context): SingleDenotation = ref match { + override def transform(ref: SingleDenotation)(using Context): SingleDenotation = ref match case moduleClassSym: 
ClassDenotation if moduleClassSym.is(ModuleClass) => - moduleClassSym.linkedClass match { + moduleClassSym.linkedClass match case valueClass: ClassSymbol if isDerivedValueClass(valueClass) => val cinfo = moduleClassSym.classInfo val decls1 = cinfo.decls.cloneScope val moduleSym = moduleClassSym.symbol.asClass - def enterInModuleClass(sym: Symbol): Unit = { + def enterInModuleClass(sym: Symbol): Unit = decls1.enter(sym) // This is tricky: in this denotation transformer, we transform // companion modules of value classes by adding methods to them. @@ -73,7 +73,6 @@ class ExtensionMethods extends MiniPhase with DenotTransformer with FullParamete // created them to match the validity of the owner transformed // denotation. sym.validFor = thisPhase.validFor - } // Create extension methods, except if the class comes from Scala 2 // because it adds extension methods before pickling. @@ -96,17 +95,14 @@ class ExtensionMethods extends MiniPhase with DenotTransformer with FullParamete moduleClassSym.copySymDenotation(info = cinfo.derivedClassInfo(decls = decls1)) case _ => moduleClassSym - } case ref: SymDenotation => var ref1 = ref - if (isMethodWithExtension(ref.symbol) && ref.hasAnnotation(defn.TailrecAnnot)) { + if (isMethodWithExtension(ref.symbol) && ref.hasAnnotation(defn.TailrecAnnot)) ref1 = ref.copySymDenotation() ref1.removeAnnotation(defn.TailrecAnnot) - } - else if (ref.isConstructor && isDerivedValueClass(ref.owner) && ref.isOneOf(AccessFlags)) { + else if (ref.isConstructor && isDerivedValueClass(ref.owner) && ref.isOneOf(AccessFlags)) ref1 = ref.copySymDenotation() ref1.resetFlag(AccessFlags) - } // Drop the Local flag from all private[this] and protected[this] members // that will be moved to the companion object. 
if (ref.is(Local) && isDerivedValueClass(ref.owner)) @@ -114,24 +110,21 @@ class ExtensionMethods extends MiniPhase with DenotTransformer with FullParamete else ref1 = ref1.copySymDenotation(initFlags = ref1.flags &~ Local) ref1 case _ => - ref.info match { + ref.info match case ClassInfo(pre, cls, _, _, _) if cls is ModuleClass => - cls.linkedClass match { + cls.linkedClass match case valueClass: ClassSymbol if isDerivedValueClass(valueClass) => val info1 = atPhase(ctx.phase.next)(cls.denot).asClass.classInfo.derivedClassInfo(prefix = pre) ref.derivedSingleDenotation(ref.symbol, info1) case _ => ref - } case _ => ref - } - } protected def rewiredTarget(target: Symbol, derived: Symbol)(using Context): Symbol = if (isMethodWithExtension(target) && target.owner.linkedClass == derived.owner) extensionMethod(target) else NoSymbol - private def createExtensionMethod(imeth: Symbol, staticClass: Symbol)(using Context): TermSymbol = { + private def createExtensionMethod(imeth: Symbol, staticClass: Symbol)(using Context): TermSymbol = val extensionMeth = newSymbol(staticClass, extensionName(imeth), (imeth.flags | Final) &~ (Override | Protected | AbsOverride), fullyParameterizedType(imeth.info, imeth.owner.asClass), @@ -139,7 +132,6 @@ class ExtensionMethods extends MiniPhase with DenotTransformer with FullParamete atPhase(thisPhase)(extensionMeth.addAnnotations(imeth.annotations)) // need to change phase to add tailrec annotation which gets removed from original method in the same phase. 
extensionMeth - } private val extensionDefs = MutableSymbolMap[mutable.ListBuffer[Tree]]() // todo: check that when transformation finished map is empty @@ -159,7 +151,7 @@ class ExtensionMethods extends MiniPhase with DenotTransformer with FullParamete else tree override def transformDefDef(tree: tpd.DefDef)(using Context): tpd.Tree = - if (isMethodWithExtension(tree.symbol)) { + if (isMethodWithExtension(tree.symbol)) val origMeth = tree.symbol val origClass = ctx.owner.asClass val staticClass = origClass.linkedClass @@ -169,11 +161,10 @@ class ExtensionMethods extends MiniPhase with DenotTransformer with FullParamete val store = extensionDefs.getOrElseUpdate(staticClass, new mutable.ListBuffer[Tree]) store += fullyParameterizedDef(extensionMeth, tree) cpy.DefDef(tree)(rhs = forwarder(extensionMeth, tree)) - } else tree } -object ExtensionMethods { +object ExtensionMethods: val name: String = "extmethods" val description: String = "expand methods of value classes with extension methods" @@ -186,7 +177,7 @@ object ExtensionMethods { /** Return the extension method that corresponds to given instance method `meth`. */ def extensionMethod(imeth: Symbol)(using Context): TermSymbol = - atPhase(extensionMethodsPhase.next) { + atPhase(extensionMethodsPhase.next): // FIXME use toStatic instead? 
val companion = imeth.owner.companionModule val companionInfo = companion.info @@ -197,13 +188,13 @@ object ExtensionMethods { && (if imeth.targetName == imeth.name then // imeth does not have a @targetName annotation, candidate should not have one either candidate.symbol.targetName == candidate.symbol.name - else + else // imeth has a @targetName annotation, candidate's target name must match imeth.targetName == candidate.symbol.targetName ) val matching = candidates.filter(matches) assert(matching.nonEmpty, - i"""no extension method found for: + i"""no extension method found for: | | $imeth:${imeth.info.show} with signature ${imeth.info.signature} in ${companion.moduleClass} | @@ -218,5 +209,3 @@ object ExtensionMethods { // this case will report a "have the same erasure" error later at erasure pahse report.log(i"mutiple extension methods match $imeth: ${candidates.map(c => i"${c.name}:${c.info}")}") matching.head.symbol.asTerm - } -} diff --git a/compiler/src/dotty/tools/dotc/transform/FirstTransform.scala b/compiler/src/dotty/tools/dotc/transform/FirstTransform.scala index 03639c8af689..255bc05bfcc1 100644 --- a/compiler/src/dotty/tools/dotc/transform/FirstTransform.scala +++ b/compiler/src/dotty/tools/dotc/transform/FirstTransform.scala @@ -20,10 +20,9 @@ import TypeUtils.isErasedValueType import config.Feature import inlines.Inlines.inInlineMethod -object FirstTransform { +object FirstTransform: val name: String = "firstTransform" val description: String = "some transformations to put trees into a canonical form" -} /** The first tree transform * - eliminates some kinds of trees: Imports other than language imports, @@ -44,17 +43,16 @@ class FirstTransform extends MiniPhase with InfoTransformer { thisPhase => override def description: String = FirstTransform.description /** eliminate self symbol in ClassInfo */ - override def transformInfo(tp: Type, sym: Symbol)(using Context): Type = tp match { + override def transformInfo(tp: Type, sym: Symbol)(using 
Context): Type = tp match case tp @ ClassInfo(_, _, _, _, self: Symbol) => tp.derivedClassInfo(selfInfo = self.info) case _ => tp - } override protected def infoMayChange(sym: Symbol)(using Context): Boolean = sym.isClass override def checkPostCondition(tree: Tree)(using Context): Unit = - tree match { + tree match case Select(qual, name) if !name.is(OuterSelectName) && tree.symbol.exists => val qualTpe = qual.tpe assert( @@ -65,26 +63,24 @@ class FirstTransform extends MiniPhase with InfoTransformer { thisPhase => case _: Export | _: NamedArg | _: TypTree => assert(false, i"illegal tree: $tree") case _ => - } /** Reorder statements so that module classes always come after their companion classes */ - private def reorderAndComplete(stats: List[Tree])(using Context): List[Tree] = { + private def reorderAndComplete(stats: List[Tree])(using Context): List[Tree] = val moduleClassDefs, singleClassDefs = mutable.Map[Name, Tree]() /* Returns the result of reordering stats and prepending revPrefix in reverse order to it. * The result of reorder is equivalent to reorder(stats, revPrefix) = revPrefix.reverse ::: reorder(stats, Nil). * This implementation is tail recursive as long as the element is not a module TypeDef. 
*/ - def reorder(stats: List[Tree], revPrefix: List[Tree]): List[Tree] = stats match { + def reorder(stats: List[Tree], revPrefix: List[Tree]): List[Tree] = stats match case (stat: TypeDef) :: stats1 if stat.symbol.isClass => - if (stat.symbol.is(Flags.Module)) { + if (stat.symbol.is(Flags.Module)) def pushOnTop(xs: List[Tree], ys: List[Tree]): List[Tree] = xs.foldLeft(ys)((ys, x) => x :: ys) moduleClassDefs += (stat.name -> stat) singleClassDefs -= stat.name.stripModuleClassSuffix val stats1r = reorder(stats1, Nil) pushOnTop(revPrefix, if (moduleClassDefs contains stat.name) stat :: stats1r else stats1r) - } else reorder( stats1, @@ -98,10 +94,8 @@ class FirstTransform extends MiniPhase with InfoTransformer { thisPhase => ) case stat :: stats1 => reorder(stats1, stat :: revPrefix) case Nil => revPrefix.reverse - } reorder(stats, Nil) - } /** Eliminate self in Template * Under captureChecking, we keep the self type `S` around in a type definition @@ -131,43 +125,39 @@ class FirstTransform extends MiniPhase with InfoTransformer { thisPhase => override def transformStats(trees: List[Tree])(using Context): List[Tree] = ast.Trees.flatten(atPhase(thisPhase.next)(reorderAndComplete(trees))) - private object collectBinders extends TreeAccumulator[List[Ident]] { - def apply(annots: List[Ident], t: Tree)(using Context): List[Ident] = t match { + private object collectBinders extends TreeAccumulator[List[Ident]]: + def apply(annots: List[Ident], t: Tree)(using Context): List[Ident] = t match case t @ Bind(_, body) => val annot = untpd.Ident(tpnme.BOUNDTYPE_ANNOT).withType(t.symbol.typeRef) apply(annot :: annots, body) case _ => foldOver(annots, t) - } - } /** Replace type tree `t` of type `T` with `TypeTree(T)`, but record all * nested Bind nodes in annotations. These are interpreted in TreeTypeMaps * so that bound symbols can be properly copied. 
*/ - private def toTypeTree(tree: Tree)(using Context) = { + private def toTypeTree(tree: Tree)(using Context) = val binders = collectBinders.apply(Nil, tree) val result: Tree = TypeTree(tree.tpe).withSpan(tree.span) binders.foldLeft(result)(Annotated(_, _)) - } - override def transformOther(tree: Tree)(using Context): Tree = tree match { + override def transformOther(tree: Tree)(using Context): Tree = tree match case tree: Export => EmptyTree case tree: NamedArg => transformAllDeep(tree.arg) case tree => if (tree.isType) toTypeTree(tree) else tree - } override def transformIdent(tree: Ident)(using Context): Tree = - if (tree.isType) { + if (tree.isType) toTypeTree(tree) - } else if (tree.name != nme.WILDCARD) { + else if (tree.name != nme.WILDCARD) // We constant-fold all idents except wildcards. // AFAIK, constant-foldable wildcard idents can only occur in patterns, for instance as `case _: "a"`. // Constant-folding that would result in `case "a": "a"`, which changes the meaning of the pattern. // Note that we _do_ want to constant-fold idents in patterns that _aren't_ wildcards - // for example, @switch annotation needs to see inlined literals and not indirect references. 
constToLiteral(tree) - } else tree + else tree override def transformSelect(tree: Select)(using Context): Tree = if (tree.isType) toTypeTree(tree) else constToLiteral(tree) @@ -187,11 +177,10 @@ class FirstTransform extends MiniPhase with InfoTransformer { thisPhase => constToLiteral(tree) override def transformIf(tree: If)(using Context): Tree = - tree.cond.tpe match { + tree.cond.tpe match case ConstantType(Constant(c: Boolean)) if isPureExpr(tree.cond) => if (c) tree.thenp else tree.elsep case _ => tree - } /** Perform one of the following simplification if applicable: * @@ -200,19 +189,16 @@ class FirstTransform extends MiniPhase with InfoTransformer { thisPhase => * true || y ==> true * false || y ==> y */ - private def foldCondition(tree: Apply)(using Context) = tree.fun match { + private def foldCondition(tree: Apply)(using Context) = tree.fun match case Select(x @ Literal(Constant(c: Boolean)), op) => - tree.args match { + tree.args match case y :: Nil if y.tpe.widen.isRef(defn.BooleanClass) => - op match { + op match case nme.ZAND => if (c) y else x case nme.ZOR => if (c) x else y case _ => tree - } case _ => tree - } case _ => tree - } // invariants: all modules have companion objects // all types are TypeTrees diff --git a/compiler/src/dotty/tools/dotc/transform/Flatten.scala b/compiler/src/dotty/tools/dotc/transform/Flatten.scala index 25df51d0916d..866bb8aa806d 100644 --- a/compiler/src/dotty/tools/dotc/transform/Flatten.scala +++ b/compiler/src/dotty/tools/dotc/transform/Flatten.scala @@ -11,7 +11,7 @@ import MegaPhase.MiniPhase import util.Store /** Lift nested classes to toplevel */ -class Flatten extends MiniPhase with SymTransformer { +class Flatten extends MiniPhase with SymTransformer: import ast.tpd._ override def phaseName: String = Flatten.name @@ -42,10 +42,9 @@ class Flatten extends MiniPhase with SymTransformer { private def liftIfNested(tree: Tree)(using Context) = if (ctx.owner.is(Package)) tree - else { + else 
transformFollowing(tree).foreachInThicket(t => liftedDefs.nn += t) EmptyTree - } override def transformStats(stats: List[Tree])(using Context): List[Tree] = if ctx.owner.is(Package) then @@ -59,7 +58,6 @@ class Flatten extends MiniPhase with SymTransformer { override def transformTypeDef(tree: TypeDef)(using Context): Tree = liftIfNested(tree) -} object Flatten: val name: String = "flatten" diff --git a/compiler/src/dotty/tools/dotc/transform/ForwardDepChecks.scala b/compiler/src/dotty/tools/dotc/transform/ForwardDepChecks.scala index bf8a6fa6c7bf..8aff08357be1 100644 --- a/compiler/src/dotty/tools/dotc/transform/ForwardDepChecks.scala +++ b/compiler/src/dotty/tools/dotc/transform/ForwardDepChecks.scala @@ -19,21 +19,19 @@ object ForwardDepChecks: type LevelAndIndex = immutable.Map[Symbol, (LevelInfo, Int)] - class OptLevelInfo { + class OptLevelInfo: def levelAndIndex: LevelAndIndex = Map() def enterReference(sym: Symbol, span: Span): Unit = () - } /** A class to help in forward reference checking */ class LevelInfo(val outer: OptLevelInfo, val owner: Symbol, stats: List[Tree])(using Context) - extends OptLevelInfo { + extends OptLevelInfo: override val levelAndIndex: LevelAndIndex = stats.foldLeft(outer.levelAndIndex, 0) {(mi, stat) => val (m, idx) = mi - val m1 = stat match { + val m1 = stat match case stat: MemberDef => m.updated(stat.symbol, (this: @unchecked, idx)) case _ => m - } (m1, idx + 1) }._1 var maxIndex: Int = Int.MinValue @@ -42,14 +40,12 @@ object ForwardDepChecks: override def enterReference(sym: Symbol, span: Span): Unit = if (sym.exists && sym.owner.isTerm) - levelAndIndex.get(sym) match { + levelAndIndex.get(sym) match case Some((level, idx)) if (level.maxIndex < idx) => level.maxIndex = idx level.refSpan = span level.refSym = sym case _ => - } - } val NoLevelInfo: OptLevelInfo = new OptLevelInfo() @@ -84,10 +80,9 @@ class ForwardDepChecks extends MiniPhase: case _ => tree - override def transformIdent(tree: Ident)(using Context): Ident = { + 
override def transformIdent(tree: Ident)(using Context): Ident = currentLevel.enterReference(tree.symbol, tree.span) tree - } /** Check that self constructor call does not contain references to vals or defs * defined later in the secondary constructor's right hand side. This is tricky @@ -123,12 +118,10 @@ class ForwardDepChecks extends MiniPhase: checkSelfConstructorCall() tree - override def transformNew(tree: New)(using Context): New = { + override def transformNew(tree: New)(using Context): New = currentLevel.enterReference(tree.tpe.typeSymbol, tree.span) - tree.tpe.dealias.foreachPart { + tree.tpe.dealias.foreachPart: case TermRef(_, s: Symbol) => currentLevel.enterReference(s, tree.span) case _ => - } tree - } end ForwardDepChecks diff --git a/compiler/src/dotty/tools/dotc/transform/FullParameterization.scala b/compiler/src/dotty/tools/dotc/transform/FullParameterization.scala index 8ca600577244..bd5aadea8cf2 100644 --- a/compiler/src/dotty/tools/dotc/transform/FullParameterization.scala +++ b/compiler/src/dotty/tools/dotc/transform/FullParameterization.scala @@ -47,7 +47,7 @@ import ast._ * * @see class-dependent-extension-method.scala in pending/pos. */ -trait FullParameterization { +trait FullParameterization: import tpd._ @@ -88,46 +88,41 @@ trait FullParameterization { * @param liftThisType if true, require created $this to be $this: (Foo[A] & Foo,this). 
* This is needed if created member stays inside scope of Foo(as in tailrec) */ - def fullyParameterizedType(info: Type, clazz: ClassSymbol, abstractOverClass: Boolean = true, liftThisType: Boolean = false)(using Context): Type = { - val (mtparamCount, origResult) = info match { + def fullyParameterizedType(info: Type, clazz: ClassSymbol, abstractOverClass: Boolean = true, liftThisType: Boolean = false)(using Context): Type = + val (mtparamCount, origResult) = info match case info: PolyType => (info.paramNames.length, info.resultType) case info: ExprType => (0, info.resultType) case _ => (0, info) - } val ctparams = if (abstractOverClass) clazz.typeParams else Nil val ctnames = ctparams.map(_.name) /** The method result type */ - def resultType(mapClassParams: Type => Type) = { + def resultType(mapClassParams: Type => Type) = val thisParamType = mapClassParams(clazz.classInfo.selfType) val firstArgType = if (liftThisType) thisParamType & clazz.thisType else thisParamType MethodType(nme.SELF :: Nil)( mt => firstArgType :: Nil, mt => mapClassParams(origResult).substThisUnlessStatic(clazz, mt.newParamRef(0))) - } /** Replace class type parameters by the added type parameters of the polytype `pt` */ - def mapClassParams(tp: Type, pt: PolyType): Type = { + def mapClassParams(tp: Type, pt: PolyType): Type = val classParamsRange = (mtparamCount until mtparamCount + ctparams.length).toList tp.subst(ctparams, classParamsRange map (pt.paramRefs(_))) - } /** The bounds for the added type parameters of the polytype `pt` */ def mappedClassBounds(pt: PolyType): List[TypeBounds] = ctparams.map(tparam => mapClassParams(tparam.info, pt).bounds) - info match { + info match case info: PolyType => PolyType(info.paramNames ++ ctnames)( pt => (info.paramInfos.map(mapClassParams(_, pt).bounds) ++ - mappedClassBounds(pt)).mapConserve(_.subst(info, pt).bounds), - pt => resultType(mapClassParams(_, pt)).subst(info, pt)) + mappedClassBounds(pt)).mapConserve(_.subst(info, pt).bounds), + pt => 
resultType(mapClassParams(_, pt)).subst(info, pt)) case _ => if (ctparams.isEmpty) resultType(identity) else PolyType(ctnames)(mappedClassBounds, pt => resultType(mapClassParams(_, pt))) - } - } /** The type parameters (skolems) of the method definition `originalDef`, * followed by the class parameters of its enclosing class. @@ -157,18 +152,17 @@ trait FullParameterization { /** If tree should be rewired, the rewired tree, otherwise EmptyTree. * @param targs Any type arguments passed to the rewired tree. */ - def rewireTree(tree: Tree, targs: List[Tree])(using Context): Tree = { - def rewireCall(thisArg: Tree): Tree = { + def rewireTree(tree: Tree, targs: List[Tree])(using Context): Tree = + def rewireCall(thisArg: Tree): Tree = val rewired = rewiredTarget(tree, derived) - if (rewired.exists) { + if (rewired.exists) val base = thisArg.tpe.baseType(origClass) assert(base.exists) ref(rewired.termRef) .appliedToTypeTrees(targs ++ base.argInfos.map(TypeTree(_))) .appliedTo(thisArg) - } else EmptyTree - } - tree match { + else EmptyTree + tree match case Return(expr, from) if !from.isEmpty => val rewired = rewiredTarget(from, derived) if (rewired.exists) @@ -181,8 +175,6 @@ trait FullParameterization { assert(targs.isEmpty) rewireTree(fn, targs1) case _ => EmptyTree - } - } /** Type rewiring is needed because a previous reference to an instance * method might still persist in the types of enclosing nodes. Example: @@ -197,10 +189,9 @@ trait FullParameterization { * because it is kept by the `cpy` operation of the tree transformer. * It needs to be rewritten to the common result type of `imeth` and `xmeth`. 
*/ - def rewireType(tpe: Type) = tpe match { + def rewireType(tpe: Type) = tpe match case tpe: TermRef if rewiredTarget(tpe.symbol, derived).exists => tpe.widen case _ => tpe - } new TreeTypeMap( typeMap = rewireType(_) @@ -220,7 +211,7 @@ trait FullParameterization { * - the `this` of the enclosing class, * - the value parameters of the original method `originalDef`. */ - def forwarder(derived: TermSymbol, originalDef: DefDef, abstractOverClass: Boolean = true, liftThisType: Boolean = false)(using Context): Tree = { + def forwarder(derived: TermSymbol, originalDef: DefDef, abstractOverClass: Boolean = true, liftThisType: Boolean = false)(using Context): Tree = val fun: Tree = ref(derived.termRef) .appliedToTypes(allInstanceTypeParams(originalDef, abstractOverClass).map(_.typeRef)) @@ -231,19 +222,17 @@ trait FullParameterization { else // this type could have changed on forwarding. Need to insert a cast. originalDef.trailingParamss.foldLeft(fun)((acc, params) => { - val meth = acc.tpe.asInstanceOf[MethodType] - val paramTypes = meth.instantiateParamInfos(params.tpes) - acc.appliedToArgs( + val meth = acc.tpe.asInstanceOf[MethodType] + val paramTypes = meth.instantiateParamInfos(params.tpes) + acc.appliedToArgs( params.lazyZip(paramTypes).map((param, paramType) => { assert(param.tpe <:< paramType.widen) // type should still conform to widened type ref(param.symbol).ensureConforms(paramType) })) - }) + }) fwd.withSpan(originalDef.rhs.span) - } -} -object FullParameterization { +object FullParameterization: /** Assuming `info` is a result of a `fullyParameterizedType` call, the signature of the * original method type `X` after stripping its leading type parameters section, @@ -257,7 +246,7 @@ object FullParameterization { * unpickled from Scala 2 (because Scala 2 extmeths phase happens before * pickling, which is maybe something we should change for 2.14). 
*/ - def memberSignature(info: Type)(using Context): Signature = info match { + def memberSignature(info: Type)(using Context): Signature = info match case info: PolyType => memberSignature(info.resultType) case MethodTpe(nme.SELF :: Nil, _, restpe) => @@ -266,5 +255,3 @@ object FullParameterization { info.derivedLambdaType(otherNames, otherTypes, restpe).signature case _ => Signature.NotAMethod - } -} diff --git a/compiler/src/dotty/tools/dotc/transform/FunctionXXLForwarders.scala b/compiler/src/dotty/tools/dotc/transform/FunctionXXLForwarders.scala index cc1c0048b68f..1d46ba310b58 100644 --- a/compiler/src/dotty/tools/dotc/transform/FunctionXXLForwarders.scala +++ b/compiler/src/dotty/tools/dotc/transform/FunctionXXLForwarders.scala @@ -22,16 +22,16 @@ import Types._ * `def apply(xs: Array[Object]): R = this.apply(xs(0).asInstanceOf[T1], ..., xs(n-1).asInstanceOf[Tn]).asInstanceOf[R]` * is generated. */ -class FunctionXXLForwarders extends MiniPhase with IdentityDenotTransformer { +class FunctionXXLForwarders extends MiniPhase with IdentityDenotTransformer: import ast.tpd._ override def phaseName: String = FunctionXXLForwarders.name override def description: String = FunctionXXLForwarders.description - override def transformTemplate(impl: Template)(using Context): Template = { + override def transformTemplate(impl: Template)(using Context): Template = - def forwarderRhs(receiver: Tree, xsTree: Tree): Tree = { + def forwarderRhs(receiver: Tree, xsTree: Tree): Tree = val argsApply = ref(xsTree.symbol).select(nme.apply) var idx = -1 val argss = receiver.tpe.widenDealias.paramInfoss.map(_.map { param => @@ -39,7 +39,6 @@ class FunctionXXLForwarders extends MiniPhase with IdentityDenotTransformer { argsApply.appliedToTermArgs(List(Literal(Constant(idx)))).cast(param) }) ref(receiver.symbol).appliedToArgss(argss).cast(defn.ObjectType) - } if impl.symbol.owner.is(Trait) then return impl @@ -50,16 +49,13 @@ class FunctionXXLForwarders extends MiniPhase with 
IdentityDenotTransformer { ddef.symbol.signature.paramsSig.size > MaxImplementedFunctionArity && ddef.symbol.allOverriddenSymbols.exists(sym => defn.isXXLFunctionClass(sym.owner)) } - yield { + yield val xsType = defn.ArrayType.appliedTo(List(defn.ObjectType)) val methType = MethodType(List(nme.args))(_ => List(xsType), _ => defn.ObjectType) val meth = newSymbol(ddef.symbol.owner, nme.apply, Synthetic | Method, methType) DefDef(meth, paramss => forwarderRhs(ddef, paramss.head.head)) - } cpy.Template(impl)(body = forwarders ::: impl.body) - } -} object FunctionXXLForwarders: val name: String = "functionXXLForwarders" diff --git a/compiler/src/dotty/tools/dotc/transform/GenericSignatures.scala b/compiler/src/dotty/tools/dotc/transform/GenericSignatures.scala index a1baeac272b9..06bd900b4847 100644 --- a/compiler/src/dotty/tools/dotc/transform/GenericSignatures.scala +++ b/compiler/src/dotty/tools/dotc/transform/GenericSignatures.scala @@ -25,7 +25,7 @@ import scala.collection.mutable.ListBuffer /** Helper object to generate generic java signatures, as defined in * the Java Virtual Machine Specification, §4.3.4 */ -object GenericSignatures { +object GenericSignatures: /** Generate the signature for `sym0`, with type `info`, as defined in * the Java Virtual Machine Specification, §4.3.4 @@ -40,19 +40,18 @@ object GenericSignatures { else atPhase(erasurePhase)(javaSig0(sym0, info)) @noinline - private final def javaSig0(sym0: Symbol, info: Type)(using Context): Option[String] = { + private final def javaSig0(sym0: Symbol, info: Type)(using Context): Option[String] = val builder = new StringBuilder(64) val isTraitSignature = sym0.enclosingClass.is(Trait) - def superSig(cls: Symbol, parents: List[Type]): Unit = { + def superSig(cls: Symbol, parents: List[Type]): Unit = def isInterfaceOrTrait(sym: Symbol) = sym.is(PureInterface) || sym.is(Trait) // a signature should always start with a class - def ensureClassAsFirstParent(tps: List[Type]) = tps match { + def 
ensureClassAsFirstParent(tps: List[Type]) = tps match case Nil => defn.ObjectType :: Nil case head :: tail if isInterfaceOrTrait(head.typeSymbol) => defn.ObjectType :: tps case _ => tps - } val minParents = minimizeParents(cls, parents) val validParents = @@ -63,7 +62,6 @@ object GenericSignatures { val ps = ensureClassAsFirstParent(validParents) ps.foreach(boxedSig) - } def boxedSig(tp: Type): Unit = jsig(tp.widenDealias, primitiveOK = false) @@ -78,7 +76,7 @@ object GenericSignatures { * Which should emit a signature `S <: A`. See the handling * of `AndType` in `jsig` which already supports `def foo(x: A & Object)`. */ - def boundsSig(bounds: List[Type]): Unit = { + def boundsSig(bounds: List[Type]): Unit = val (repr :: _, others) = splitIntersection(bounds): @unchecked builder.append(':') @@ -94,7 +92,6 @@ object GenericSignatures { builder.append(':') boxedSig(tp) } - } /** The parents of this intersection where type parameters * that cannot appear in the signature have been replaced @@ -133,32 +130,28 @@ object GenericSignatures { else Right(parent)) - def paramSig(param: TypeParamInfo): Unit = { + def paramSig(param: TypeParamInfo): Unit = builder.append(sanitizeName(param.paramName.lastPart)) boundsSig(hiBounds(param.paramInfo.bounds)) - } def polyParamSig(tparams: List[TypeParamInfo]): Unit = - if (tparams.nonEmpty) { + if (tparams.nonEmpty) builder.append('<') tparams.foreach(paramSig) builder.append('>') - } - def typeParamSig(name: Name): Unit = { + def typeParamSig(name: Name): Unit = builder.append(ClassfileConstants.TVAR_TAG) builder.append(sanitizeName(name)) builder.append(';') - } - def methodResultSig(restpe: Type): Unit = { + def methodResultSig(restpe: Type): Unit = val finalType = restpe.finalResultType val sym = finalType.typeSymbol if (sym == defn.UnitClass || sym == defn.BoxedUnitModule || sym0.isConstructor) builder.append(ClassfileConstants.VOID_TAG) else jsig(finalType) - } // This works as long as mangled names are always valid valid 
Java identifiers, // if we change our name encoding, we'll have to `throw new UnknownSig` here for @@ -168,24 +161,21 @@ object GenericSignatures { // Anything which could conceivably be a module (i.e. isn't known to be // a type parameter or similar) must go through here or the signature is // likely to end up with Foo.Empty where it needs Foo.Empty$. - def fullNameInSig(sym: Symbol): Unit = { + def fullNameInSig(sym: Symbol): Unit = assert(sym.isClass) val name = atPhase(genBCodePhase) { sanitizeName(sym.fullName).replace('.', '/') } builder.append('L').nn.append(name) - } - def classSig(sym: Symbol, pre: Type = NoType, args: List[Type] = Nil): Unit = { + def classSig(sym: Symbol, pre: Type = NoType, args: List[Type] = Nil): Unit = def argSig(tp: Type): Unit = - tp match { + tp match case bounds: TypeBounds => - if (!(defn.AnyType <:< bounds.hi)) { + if (!(defn.AnyType <:< bounds.hi)) builder.append('+') boxedSig(bounds.hi) - } - else if (!(bounds.lo <:< defn.NothingType)) { + else if (!(bounds.lo <:< defn.NothingType)) builder.append('-') boxedSig(bounds.lo) - } else builder.append('*') case EtaExpansion(tp) => argSig(tp) @@ -195,14 +185,13 @@ object GenericSignatures { boxedSig(tp.widenDealias.widenNullaryMethod) // `tp` might be a singleton type referring to a getter. // Hence the widenNullaryMethod. - } - if (pre.exists) { + if (pre.exists) val preRebound = pre.baseType(sym.owner) // #2585 - if (needsJavaSig(preRebound, Nil)) { + if (needsJavaSig(preRebound, Nil)) val i = builder.length() jsig(preRebound) - if (builder.charAt(i) == 'L') { + if (builder.charAt(i) == 'L') builder.delete(builder.length() - 1, builder.length())// delete ';' // If the prefix is a module, drop the '$'. Classes (or modules) nested in modules // are separated by a single '$' in the filename: `object o { object i }` is o$i$. @@ -217,26 +206,21 @@ object GenericSignatures { // TODO revisit this. Does it align with javac for code that can be expressed in both languages? 
val delimiter = if (builder.charAt(builder.length() - 1) == '>') '.' else '$' builder.append(delimiter).nn.append(sanitizeName(sym.name)) - } else fullNameInSig(sym) - } else fullNameInSig(sym) - } else fullNameInSig(sym) - if (args.nonEmpty) { + if (args.nonEmpty) builder.append('<') args foreach argSig builder.append('>') - } builder.append(';') - } @noinline - def jsig(tp0: Type, toplevel: Boolean = false, primitiveOK: Boolean = true): Unit = { + def jsig(tp0: Type, toplevel: Boolean = false, primitiveOK: Boolean = true): Unit = val tp = tp0.dealias - tp match { + tp match case ref @ TypeParamRef(_: PolyType, _) => val erasedUnderlying = fullErasure(ref.underlying.bounds.hi) @@ -257,10 +241,9 @@ object GenericSignatures { case RefOrAppliedType(sym, pre, args) => if (sym == defn.PairClass && tp.tupleArity > Definitions.MaxTupleArity) jsig(defn.TupleXXLClass.typeRef) - else if (isTypeParameterInSig(sym, sym0)) { + else if (isTypeParameterInSig(sym, sym0)) assert(!sym.isAliasType, "Unexpected alias type: " + sym) typeParamSig(sym.name.lastPart) - } else if (defn.specialErasure.contains(sym)) jsig(defn.specialErasure(sym).nn.typeRef) else if (sym == defn.UnitClass || sym == defn.BoxedUnitModule) @@ -273,17 +256,15 @@ object GenericSignatures { if (!primitiveOK) jsig(defn.ObjectType) else if (sym == defn.UnitClass) jsig(defn.BoxedUnitClass.typeRef) else builder.append(defn.typeTag(sym.info)) - else if (ValueClasses.isDerivedValueClass(sym)) { + else if (ValueClasses.isDerivedValueClass(sym)) val erasedUnderlying = fullErasure(tp) if (erasedUnderlying.isPrimitiveValueType && !primitiveOK) classSig(sym, pre, args) else jsig(erasedUnderlying, toplevel, primitiveOK) - } - else if (defn.isSyntheticFunctionClass(sym)) { + else if (defn.isSyntheticFunctionClass(sym)) val erasedSym = defn.functionTypeErasure(sym).typeSymbol classSig(erasedSym, pre, if (erasedSym.typeParams.isEmpty) Nil else args) - } else if sym.isClass then classSig(sym, pre, args) else @@ -310,13 +291,12 
@@ object GenericSignatures { case mtpe: MethodType => // erased method parameters do not make it to the bytecode. - def effectiveParamInfoss(t: Type)(using Context): List[List[Type]] = t match { + def effectiveParamInfoss(t: Type)(using Context): List[List[Type]] = t match case t: MethodType if t.hasErasedParams => t.paramInfos.zip(t.erasedParams).collect{ case (i, false) => i } :: effectiveParamInfoss(t.resType) case t: MethodType => t.paramInfos :: effectiveParamInfoss(t.resType) case _ => Nil - } val params = effectiveParamInfoss(mtpe).flatten val restpe = mtpe.finalResultType builder.append('(') @@ -357,53 +337,45 @@ object GenericSignatures { val etp = erasure(tp) if (etp eq tp) throw new UnknownSig else jsig(etp, toplevel, primitiveOK) - } - } val throwsArgs = sym0.annotations flatMap ThrownException.unapply if (needsJavaSig(info, throwsArgs)) - try { + try jsig(info, toplevel = true) throwsArgs.foreach { t => builder.append('^') jsig(t, toplevel = true) } Some(builder.toString) - } catch { case _: UnknownSig => None } else None - } private class UnknownSig extends Exception /* Drop redundant types (ones which are implemented by some other parent) from the immediate parents. * This is important on Android because there is otherwise an interface explosion. 
*/ - private def minimizeParents(cls: Symbol, parents: List[Type])(using Context): List[Type] = if (parents.isEmpty) parents else { + private def minimizeParents(cls: Symbol, parents: List[Type])(using Context): List[Type] = if (parents.isEmpty) parents else // val requiredDirect: Symbol => Boolean = requiredDirectInterfaces.getOrElse(cls, Set.empty) var rest = parents.tail var leaves = collection.mutable.ListBuffer.empty[Type] += parents.head - while (rest.nonEmpty) { + while (rest.nonEmpty) val candidate = rest.head val candidateSym = candidate.typeSymbol // val required = requiredDirect(candidateSym) || !leaves.exists(t => t.typeSymbol isSubClass candidateSym) val required = !leaves.exists(t => t.typeSymbol.isSubClass(candidateSym)) - if (required) { + if (required) leaves = leaves filter { t => val ts = t.typeSymbol !(ts.is(Trait) || ts.is(PureInterface)) || !candidateSym.isSubClass(ts) // requiredDirect(ts) || !ts.isTraitOrInterface || !candidateSym.isSubClass(ts) } leaves += candidate - } rest = rest.tail - } leaves.toList - } - private def hiBounds(bounds: TypeBounds)(using Context): List[Type] = bounds.hi.widenDealias match { + private def hiBounds(bounds: TypeBounds)(using Context): List[Type] = bounds.hi.widenDealias match case AndType(tp1, tp2) => hiBounds(tp1.bounds) ::: hiBounds(tp2.bounds) case tp => tp :: Nil - } // only refer to type params that will actually make it into the sig, this excludes: @@ -426,13 +398,12 @@ object GenericSignatures { // included (use pre.baseType(cls.owner)). // // This requires that cls.isClass. 
- private def rebindInnerClass(pre: Type, cls: Symbol)(using Context): Type = { + private def rebindInnerClass(pre: Type, cls: Symbol)(using Context): Type = val owner = cls.owner if (owner.is(PackageClass) || owner.isTerm) pre else cls.owner.info /* .tpe_* */ - } - private object RefOrAppliedType { - def unapply(tp: Type)(using Context): Option[(Symbol, Type, List[Type])] = tp match { + private object RefOrAppliedType: + def unapply(tp: Type)(using Context): Option[(Symbol, Type, List[Type])] = tp match case TypeParamRef(_, _) => Some((tp.typeSymbol, tp, Nil)) case TermParamRef(_, _) => @@ -444,18 +415,15 @@ object GenericSignatures { Some((pre.typeSymbol, pre, args)) case _ => None - } - } - private def needsJavaSig(tp: Type, throwsArgs: List[Type])(using Context): Boolean = !ctx.settings.YnoGenericSig.value && { + private def needsJavaSig(tp: Type, throwsArgs: List[Type])(using Context): Boolean = !ctx.settings.YnoGenericSig.value `&&`: def needs(tp: Type) = (new NeedsSigCollector).apply(false, tp) needs(tp) || throwsArgs.exists(needs) - } - private class NeedsSigCollector(using Context) extends TypeAccumulator[Boolean] { + private class NeedsSigCollector(using Context) extends TypeAccumulator[Boolean]: override def apply(x: Boolean, tp: Type): Boolean = if (!x) - tp.dealias match { + tp.dealias match case RefinedType(parent, refinedName, refinedInfo) => val sym = parent.typeSymbol if (sym == defn.ArrayClass) foldOver(x, refinedInfo) @@ -475,7 +443,4 @@ object GenericSignatures { true case tp => foldOver(x, tp) - } else x - } -} diff --git a/compiler/src/dotty/tools/dotc/transform/Getters.scala b/compiler/src/dotty/tools/dotc/transform/Getters.scala index ad06bfb0a504..bff4cda10eef 100644 --- a/compiler/src/dotty/tools/dotc/transform/Getters.scala +++ b/compiler/src/dotty/tools/dotc/transform/Getters.scala @@ -63,7 +63,7 @@ class Getters extends MiniPhase with SymTransformer { thisPhase => override def description: String = Getters.description - override def 
transformSym(d: SymDenotation)(using Context): SymDenotation = { + override def transformSym(d: SymDenotation)(using Context): SymDenotation = def noGetterNeeded = d.isOneOf(NoGetterNeededFlags) || d.isAllOf(PrivateLocal) && !d.owner.is(Trait) && !isDerivedValueClass(d.owner) && !d.is(Lazy) || @@ -72,7 +72,7 @@ class Getters extends MiniPhase with SymTransformer { thisPhase => d.isSelfSym var d1 = - if (d.isTerm && (d.is(Lazy) || d.owner.isClass) && d.info.isValueType && !noGetterNeeded) { + if (d.isTerm && (d.is(Lazy) || d.owner.isClass) && d.info.isValueType && !noGetterNeeded) val maybeStable = if (d.isStableMember) StableRealizable else EmptyFlags d.copySymDenotation( initFlags = d.flags | maybeStable | AccessorCreationFlags, @@ -81,7 +81,6 @@ class Getters extends MiniPhase with SymTransformer { thisPhase => // SingleDenotations referring to a getter. In this case it does not // seem to be a problem since references to a getter don't care whether // it's a `T` or a `=> T` - } else d // Drop the Local flag from all private[this] and protected[this] members. 
@@ -89,7 +88,6 @@ class Getters extends MiniPhase with SymTransformer { thisPhase => if (d1 ne d) d1.resetFlag(Local) else d1 = d1.copySymDenotation(initFlags = d1.flags &~ Local) d1 - } private val NoGetterNeededFlags = Method | Param | JavaDefined | JavaStatic val newSetters = util.HashSet[Symbol]() @@ -119,7 +117,6 @@ class Getters extends MiniPhase with SymTransformer { thisPhase => else tree } -object Getters { +object Getters: val name: String = "getters" val description: String = "replace non-private vals and vars with getter defs" -} diff --git a/compiler/src/dotty/tools/dotc/transform/HoistSuperArgs.scala b/compiler/src/dotty/tools/dotc/transform/HoistSuperArgs.scala index 9a36d65babe8..4dfdc0d2f4fd 100644 --- a/compiler/src/dotty/tools/dotc/transform/HoistSuperArgs.scala +++ b/compiler/src/dotty/tools/dotc/transform/HoistSuperArgs.scala @@ -15,10 +15,9 @@ import core.NameKinds.SuperArgName import SymUtils._ import core.Decorators.* -object HoistSuperArgs { +object HoistSuperArgs: val name: String = "hoistSuperArgs" val description: String = "hoist complex arguments of supercalls to enclosing scope" -} /** This phase hoists complex arguments of supercalls and this-calls out of the enclosing class. * Example: @@ -56,7 +55,7 @@ class HoistSuperArgs extends MiniPhase with IdentityDenotTransformer { thisPhase * parent super calls and constructor definitions. 
* Hoisted superarg methods are collected in `superArgDefs` */ - class Hoister(cls: Symbol)(using Context) { + class Hoister(cls: Symbol)(using Context): val superArgDefs: mutable.ListBuffer[DefDef] = new mutable.ListBuffer /** If argument is complex, hoist it out into its own method and refer to the @@ -66,7 +65,7 @@ class HoistSuperArgs extends MiniPhase with IdentityDenotTransformer { thisPhase * @param lifted Argument definitions that were lifted out in a call prefix * @return The argument after possible hoisting */ - private def hoistSuperArg(arg: Tree, cdef: DefDef, lifted: List[Symbol]): Tree = { + private def hoistSuperArg(arg: Tree, cdef: DefDef, lifted: List[Symbol]): Tree = val constr = cdef.symbol lazy val origParams = // The parameters that can be accessed in the supercall if (constr == cls.primaryConstructor) @@ -75,20 +74,18 @@ class HoistSuperArgs extends MiniPhase with IdentityDenotTransformer { thisPhase allParamSyms(cdef) /** The parameter references defined by the constructor info */ - def allParamRefs(tp: Type): List[ParamRef] = tp match { + def allParamRefs(tp: Type): List[ParamRef] = tp match case tp: LambdaType => tp.paramRefs ++ allParamRefs(tp.resultType) case _ => Nil - } /** Splice `restpe` in final result type position of `tp` */ - def replaceResult(tp: Type, restpe: Type): Type = tp match { + def replaceResult(tp: Type, restpe: Type): Type = tp match case tp: LambdaType => tp.derivedLambdaType(resType = replaceResult(tp.resultType, restpe)) case _ => restpe - } /** A method representing a hoisted supercall argument */ - def newSuperArgMethod(argType: Type) = { + def newSuperArgMethod(argType: Type) = val (staticFlag, methOwner) = if (cls.owner.is(Package)) (JavaStatic, cls) else (EmptyFlags, cls.owner) val argTypeWrtConstr = argType.widenTermRefExpr.subst(origParams, allParamRefs(constr.info)) @@ -104,14 +101,12 @@ class HoistSuperArgs extends MiniPhase with IdentityDenotTransformer { thisPhase info = replaceResult(constr.info, 
abstractedArgType), coord = constr.coord ).enteredAfter(thisPhase) - } /** Type of a reference implies that it needs to be hoisted */ - def refNeedsHoist(tp: Type): Boolean = tp match { + def refNeedsHoist(tp: Type): Boolean = tp match case tp: ThisType => !tp.cls.isStaticOwner && !cls.isContainedIn(tp.cls) case tp: TermRef => refNeedsHoist(tp.prefix) case _ => false - } /** Super call argument is complex, needs to be hoisted */ def needsHoist(tree: Tree) = tree match @@ -122,16 +117,15 @@ class HoistSuperArgs extends MiniPhase with IdentityDenotTransformer { thisPhase case _ => false /** Only rewire types that are owned by the current Hoister and is an param or accessor */ - def needsRewire(tp: Type) = tp match { + def needsRewire(tp: Type) = tp match case ntp: NamedType => val owner = ntp.symbol.maybeOwner (owner == cls || owner == constr) && ntp.symbol.isParamOrAccessor || lifted.contains(ntp.symbol) case _ => false - } // begin hoistSuperArg - arg match { + arg match case _ if arg.existsSubTree(needsHoist) => val superMeth = newSuperArgMethod(arg.tpe) val superArgDef = DefDef(superMeth, prefss => { @@ -140,15 +134,13 @@ class HoistSuperArgs extends MiniPhase with IdentityDenotTransformer { thisPhase val tmap = new TreeTypeMap( typeMap = new TypeMap { lazy val origToParam = (origParams ::: lifted).zip(paramSyms).toMap - def apply(tp: Type) = tp match { + def apply(tp: Type) = tp match case tp: NamedType if needsRewire(tp) => - origToParam.get(tp.symbol) match { + origToParam.get(tp.symbol) match case Some(mappedSym) => if (tp.symbol.isType) mappedSym.typeRef else mappedSym.termRef case None => mapOver(tp) - } case _ => mapOver(tp) - } }, treeMap = { case tree: RefTree if needsRewire(tree.tpe) => @@ -159,7 +151,7 @@ class HoistSuperArgs extends MiniPhase with IdentityDenotTransformer { thisPhase tmap(arg).changeOwnerAfter(constr, superMeth, thisPhase) }) superArgDefs += superArgDef - def termParamRefs(tp: Type, params: List[Symbol]): List[List[Tree]] = tp match { 
+ def termParamRefs(tp: Type, params: List[Symbol]): List[List[Tree]] = tp match case tp: PolyType => termParamRefs(tp.resultType, params) case tp: MethodType => @@ -167,7 +159,6 @@ class HoistSuperArgs extends MiniPhase with IdentityDenotTransformer { thisPhase thisParams.map(ref) :: termParamRefs(tp.resultType, otherParams) case _ => Nil - } val (typeParams, termParams) = origParams.span(_.isType) var res = ref(superMeth) .appliedToTypes(typeParams.map(_.typeRef)) @@ -177,8 +168,6 @@ class HoistSuperArgs extends MiniPhase with IdentityDenotTransformer { thisPhase report.log(i"hoist $arg, cls = $cls = $res") res case _ => arg - } - } /** Hoist complex arguments in super call out of the class. */ def hoistSuperArgsFromCall(superCall: Tree, cdef: DefDef, lifted: mutable.ListBuffer[Symbol]): Tree = superCall match @@ -205,7 +194,7 @@ class HoistSuperArgs extends MiniPhase with IdentityDenotTransformer { thisPhase superCall /** Hoist complex arguments in this-constructor call of secondary constructor out of the class. 
*/ - def hoistSuperArgsFromConstr(stat: Tree): Tree = stat match { + def hoistSuperArgsFromConstr(stat: Tree): Tree = stat match case constr: DefDef if constr.symbol.isClassConstructor => val lifted = new mutable.ListBuffer[Symbol] cpy.DefDef(constr)(rhs = @@ -216,20 +205,18 @@ class HoistSuperArgs extends MiniPhase with IdentityDenotTransformer { thisPhase expr) case _ => hoistSuperArgsFromCall(constr.rhs, constr, lifted) - ) + ) case _ => stat - } - } override def transformTypeDef(tdef: TypeDef)(using Context): Tree = - tdef.rhs match { + tdef.rhs match case impl @ Template(cdef, superCall :: others, _, _) => val hoist = new Hoister(tdef.symbol) val hoistedSuperCall = hoist.hoistSuperArgsFromCall(superCall, cdef, new mutable.ListBuffer) val hoistedBody = impl.body.mapconserve(hoist.hoistSuperArgsFromConstr) if (hoist.superArgDefs.isEmpty) tdef - else { + else val (staticSuperArgDefs, enclSuperArgDefs) = hoist.superArgDefs.toList.partition(_.symbol.is(JavaStatic)) flatTree( @@ -238,8 +225,6 @@ class HoistSuperArgs extends MiniPhase with IdentityDenotTransformer { thisPhase parents = hoistedSuperCall :: others, body = hoistedBody ++ staticSuperArgDefs)) :: enclSuperArgDefs) - } case _ => tdef - } } diff --git a/compiler/src/dotty/tools/dotc/transform/InlineVals.scala b/compiler/src/dotty/tools/dotc/transform/InlineVals.scala index 047a187bad68..f79cc8989b48 100644 --- a/compiler/src/dotty/tools/dotc/transform/InlineVals.scala +++ b/compiler/src/dotty/tools/dotc/transform/InlineVals.scala @@ -29,7 +29,7 @@ class InlineVals extends MiniPhase: tree /** Check that `tree.rhs` can be right hand-side of an `inline` value definition. 
*/ - private def checkInlineConformant(tree: ValDef)(using Context): Unit = { + private def checkInlineConformant(tree: ValDef)(using Context): Unit = if tree.symbol.is(Inline, butNot = DeferredOrTermParamOrAccessor) && !Inlines.inInlineMethod then @@ -52,7 +52,6 @@ class InlineVals extends MiniPhase: report.error(em"`inline val` with `null` is not supported.\n\nTo inline a `null` consider using `inline def`", rhs) else report.error(em"inline value must contain a literal constant value.\n\nTo inline more complex types consider using `inline def`", rhs) - } object InlineVals: val name: String = "inlineVals" diff --git a/compiler/src/dotty/tools/dotc/transform/Inlining.scala b/compiler/src/dotty/tools/dotc/transform/Inlining.scala index 10f73fa94e08..124a1eee53a5 100644 --- a/compiler/src/dotty/tools/dotc/transform/Inlining.scala +++ b/compiler/src/dotty/tools/dotc/transform/Inlining.scala @@ -17,7 +17,7 @@ import dotty.tools.dotc.staging.StagingLevel import scala.collection.mutable.ListBuffer /** Inlines all calls to inline methods that are not in an inline method or a quote */ -class Inlining extends MacroTransform { +class Inlining extends MacroTransform: import tpd._ @@ -40,7 +40,7 @@ class Inlining extends MacroTransform { newUnits override def checkPostCondition(tree: Tree)(using Context): Unit = - tree match { + tree match case PackageDef(pid, _) if tree.symbol.owner == defn.RootClass => new TreeTraverser { def traverse(tree: Tree)(using Context): Unit = @@ -51,21 +51,19 @@ class Inlining extends MacroTransform { traverseChildren(tree) }.traverse(tree) case _ => - } - def newTransformer(using Context): Transformer = new Transformer { + def newTransformer(using Context): Transformer = new Transformer: override def transform(tree: tpd.Tree)(using Context): tpd.Tree = new InliningTreeMap().transform(tree) - } - private class InliningTreeMap extends TreeMapWithImplicits { + private class InliningTreeMap extends TreeMapWithImplicits: /** List of top level classes 
added by macro annotation in a package object. * These are added to the PackageDef that owns this particular package object. */ private val newTopClasses = MutableSymbolMap[ListBuffer[Tree]]() - override def transform(tree: Tree)(using Context): Tree = { + override def transform(tree: Tree)(using Context): Tree = tree match case tree: MemberDef => if tree.symbol.is(Inline) then tree @@ -106,9 +104,6 @@ class Inlining extends MacroTransform { case _ => if tree.isType then tree else super.transform(tree) - } - } -} object Inlining: val name: String = "inlining" diff --git a/compiler/src/dotty/tools/dotc/transform/InstrumentCoverage.scala b/compiler/src/dotty/tools/dotc/transform/InstrumentCoverage.scala index 29572a4ae30d..f4aa18f53070 100644 --- a/compiler/src/dotty/tools/dotc/transform/InstrumentCoverage.scala +++ b/compiler/src/dotty/tools/dotc/transform/InstrumentCoverage.scala @@ -201,7 +201,7 @@ class InstrumentCoverage extends MacroTransform with IdentityDenotTransformer: InstrumentedParts.singleExprTree(coverageCall, transformed) override def transform(tree: Tree)(using Context): Tree = - inContext(transformCtx(tree)) { // necessary to position inlined code properly + inContext(transformCtx(tree)): // necessary to position inlined code properly tree match // simple cases case tree: (Import | Export | Literal | This | Super | New) => tree @@ -306,7 +306,6 @@ class InstrumentCoverage extends MacroTransform with IdentityDenotTransformer: // For everything else just recurse and transform case _ => super.transform(tree) - } /** Transforms a `def lhs = rhs` and instruments its body (rhs). 
* diff --git a/compiler/src/dotty/tools/dotc/transform/Instrumentation.scala b/compiler/src/dotty/tools/dotc/transform/Instrumentation.scala index 046147f20d82..5892c1b367a6 100644 --- a/compiler/src/dotty/tools/dotc/transform/Instrumentation.scala +++ b/compiler/src/dotty/tools/dotc/transform/Instrumentation.scala @@ -55,10 +55,9 @@ class Instrumentation extends MiniPhase { thisPhase => CollectionIterableClass = requiredClass("scala.collection.Iterable") ctx - private def record(category: String, tree: Tree)(using Context): Tree = { + private def record(category: String, tree: Tree)(using Context): Tree = val key = Literal(Constant(s"$category@${tree.sourcePos.show}")) ref(Stats_doRecord).appliedTo(key, Literal(Constant(1))) - } private def recordSize(tree: Apply)(using Context): Tree = tree.fun match case sel @ Select(qual, name) @@ -88,21 +87,19 @@ class Instrumentation extends MiniPhase { thisPhase => cpy.DefDef(tree)(rhs = rhs1) else tree - override def transformApply(tree: Apply)(using Context): Tree = tree.fun match { + override def transformApply(tree: Apply)(using Context): Tree = tree.fun match case Select(nu: New, _) => cpy.Block(tree)(record(i"alloc/${nu.tpe}", tree) :: Nil, tree) case ref: RefTree if namesToRecord.contains(ref.name) && ok => cpy.Block(tree)(record(i"call/${ref.name}", tree) :: Nil, recordSize(tree)) case _ => tree - } - override def transformBlock(tree: Block)(using Context): Block = tree.expr match { + override def transformBlock(tree: Block)(using Context): Block = tree.expr match case _: Closure => cpy.Block(tree)(record("closure/", tree) :: tree.stats, tree.expr) case _ => tree - } } object Instrumentation: diff --git a/compiler/src/dotty/tools/dotc/transform/InterceptedMethods.scala b/compiler/src/dotty/tools/dotc/transform/InterceptedMethods.scala index c95500d856be..299a01c0713d 100644 --- a/compiler/src/dotty/tools/dotc/transform/InterceptedMethods.scala +++ b/compiler/src/dotty/tools/dotc/transform/InterceptedMethods.scala @@ 
-10,10 +10,9 @@ import dotty.tools.dotc.core.Symbols._ import dotty.tools.dotc.core.Types._ import dotty.tools.dotc.transform.MegaPhase.MiniPhase -object InterceptedMethods { +object InterceptedMethods: val name: String = "intercepted" val description: String = "rewrite universal `!=`, `##` methods" -} /** Replace member references as follows: * @@ -22,7 +21,7 @@ object InterceptedMethods { * - `x.##` for ## in Any becomes calls to ScalaRunTime.hash, * using the most precise overload available */ -class InterceptedMethods extends MiniPhase { +class InterceptedMethods extends MiniPhase: import tpd._ override def phaseName: String = InterceptedMethods.name @@ -37,19 +36,17 @@ class InterceptedMethods extends MiniPhase { transformRefTree(tree) private def transformRefTree(tree: RefTree)(using Context): Tree = - if (tree.symbol.isTerm && (defn.Any_## eq tree.symbol)) { - val qual = tree match { + if (tree.symbol.isTerm && (defn.Any_## eq tree.symbol)) + val qual = tree match case id: Ident => tpd.desugarIdentPrefix(id) case sel: Select => sel.qualifier - } val rewritten = poundPoundValue(qual) report.log(s"$phaseName rewrote $tree to $rewritten") rewritten - } else tree // TODO: add missing cases from scalac - private def poundPoundValue(tree: Tree)(using Context) = { + private def poundPoundValue(tree: Tree)(using Context) = val s = tree.tpe.typeSymbol def staticsCall(methodName: TermName): Tree = @@ -60,23 +57,18 @@ class InterceptedMethods extends MiniPhase { else if (s == defn.LongClass) staticsCall(nme.longHash) else if (s == defn.FloatClass) staticsCall(nme.floatHash) else staticsCall(nme.anyHash) - } - override def transformApply(tree: Apply)(using Context): Tree = { - lazy val qual = tree.fun match { + override def transformApply(tree: Apply)(using Context): Tree = + lazy val qual = tree.fun match case Select(qual, _) => qual case ident: Ident => - ident.tpe match { + ident.tpe match case TermRef(prefix: TermRef, _) => tpd.ref(prefix) case TermRef(prefix: 
ThisType, _) => tpd.This(prefix.cls) - } - } if tree.fun.symbol == defn.Any_!= then qual.select(defn.Any_==).appliedToTermArgs(tree.args).select(defn.Boolean_!).withSpan(tree.span) else tree - } -} diff --git a/compiler/src/dotty/tools/dotc/transform/LambdaLift.scala b/compiler/src/dotty/tools/dotc/transform/LambdaLift.scala index 6ec0f330efff..079b117dea43 100644 --- a/compiler/src/dotty/tools/dotc/transform/LambdaLift.scala +++ b/compiler/src/dotty/tools/dotc/transform/LambdaLift.scala @@ -81,7 +81,7 @@ object LambdaLift: (fv, proxy) proxyMap(owner) = freeProxyPairs.toMap - private def liftedInfo(local: Symbol)(using Context): Type = local.info match { + private def liftedInfo(local: Symbol)(using Context): Type = local.info match case MethodTpe(pnames, ptypes, restpe) => val ps = proxies(local) MethodType( @@ -89,10 +89,9 @@ object LambdaLift: ps.map(_.info) ++ ptypes, restpe) case info => info - } - private def liftLocals()(using Context): Unit = { - for ((local, lOwner) <- deps.logicalOwner) { + private def liftLocals()(using Context): Unit = + for ((local, lOwner) <- deps.logicalOwner) val (newOwner, maybeStatic) = if lOwner is Package then (local.topLevelClass, JavaStatic) else (lOwner, EmptyFlags) @@ -113,11 +112,9 @@ object LambdaLift: name = newName(local), initFlags = initFlags, info = liftedInfo(local)).installAfter(thisPhase) - } for (local <- deps.tracked) if (!deps.logicalOwner.contains(local)) local.copySymDenotation(info = liftedInfo(local)).installAfter(thisPhase) - } def currentEnclosure(using Context): Symbol = ctx.owner.enclosingMethodOrClass @@ -125,53 +122,45 @@ object LambdaLift: private def inCurrentOwner(sym: Symbol)(using Context) = sym.enclosure == currentEnclosure - private def proxy(sym: Symbol)(using Context): Symbol = { + private def proxy(sym: Symbol)(using Context): Symbol = def liftedEnclosure(sym: Symbol) = deps.logicalOwner.getOrElse(sym, sym.enclosure) - def searchIn(enclosure: Symbol): Symbol = { - if (!enclosure.exists) { + 
def searchIn(enclosure: Symbol): Symbol = + if (!enclosure.exists) def enclosures(encl: Symbol): List[Symbol] = if (encl.exists) encl :: enclosures(liftedEnclosure(encl)) else Nil throw new IllegalArgumentException(i"Could not find proxy for ${sym.showDcl} in ${sym.ownersIterator.toList}, encl = $currentEnclosure, owners = ${currentEnclosure.ownersIterator.toList}%, %; enclosures = ${enclosures(currentEnclosure)}%, %") - } report.debuglog(i"searching for $sym(${sym.owner}) in $enclosure") - proxyMap get enclosure match { + proxyMap get enclosure match case Some(pmap) => - pmap get sym match { + pmap get sym match case Some(proxy) => return proxy case none => - } case none => - } searchIn(liftedEnclosure(enclosure)) - } if (inCurrentOwner(sym)) sym else searchIn(currentEnclosure) - } - def memberRef(sym: Symbol)(using Context): Tree = { + def memberRef(sym: Symbol)(using Context): Tree = val clazz = sym.enclosingClass val qual = if (clazz.isStaticOwner || ctx.owner.enclosingClass == clazz) singleton(clazz.thisType) else if (ctx.owner.isConstructor) - outerParam.get(ctx.owner) match { + outerParam.get(ctx.owner) match case Some(param) => outer.path(start = Ident(param.termRef), toCls = clazz) case _ => outer.path(toCls = clazz) - } else outer.path(toCls = clazz) thisPhase.transformFollowingDeep(qual.select(sym)) - } - def proxyRef(sym: Symbol)(using Context): Tree = { + def proxyRef(sym: Symbol)(using Context): Tree = val psym = atPhase(thisPhase)(proxy(sym)) thisPhase.transformFollowingDeep(if (psym.owner.isTerm) ref(psym) else memberRef(psym)) - } def addFreeArgs(sym: Symbol, args: List[Tree])(using Context): List[Tree] = val fvs = deps.freeVars(sym) if fvs.nonEmpty then fvs.toList.map(proxyRef(_)) ++ args else args - def addFreeParams(tree: Tree, proxies: List[Symbol])(using Context): Tree = proxies match { + def addFreeParams(tree: Tree, proxies: List[Symbol])(using Context): Tree = proxies match case Nil => tree case proxies => val sym = tree.symbol @@ -181,15 
+170,14 @@ object LambdaLift: thisPhase.transformFollowingDeep(memberRef(field).becomes(ref(param))) /** Initialize proxy fields from proxy parameters and map `rhs` from fields to parameters */ - def copyParams(rhs: Tree) = { + def copyParams(rhs: Tree) = val fvs = deps.freeVars(sym.owner).toList val classProxies = fvs.map(proxyOf(sym.owner, _)) val constrProxies = fvs.map(proxyOf(sym, _)) report.debuglog(i"copy params ${constrProxies.map(_.showLocated)}%, % to ${classProxies.map(_.showLocated)}%, %}") seq(classProxies.lazyZip(constrProxies).map(proxyInit), rhs) - } - tree match { + tree match case tree: DefDef => cpy.DefDef(tree)( paramss = tree.termParamss.map(freeParamDefs ++ _), @@ -198,22 +186,18 @@ object LambdaLift: else tree.rhs) case tree: Template => cpy.Template(tree)(body = freeParamDefs ++ tree.body) - } - } - def liftDef(tree: MemberDef)(using Context): Tree = { + def liftDef(tree: MemberDef)(using Context): Tree = val buf = liftedDefs(tree.symbol.owner) thisPhase.transformFollowing(rename(tree, tree.symbol.name)).foreachInThicket(buf += _) EmptyTree - } def needsLifting(sym: Symbol): Boolean = deps.logicalOwner.contains(sym) // initialization - atPhase(thisPhase.next) { + atPhase(thisPhase.next): generateProxies() liftLocals() - } end Lifter end LambdaLift @@ -275,9 +259,9 @@ class LambdaLift extends MiniPhase with IdentityDenotTransformer { thisPhase => override def prepareForUnit(tree: Tree)(using Context): Context = ctx.fresh.updateStore(Lifter, new Lifter(thisPhase)) - override def transformIdent(tree: Ident)(using Context): Tree = { + override def transformIdent(tree: Ident)(using Context): Tree = val sym = tree.symbol - tree.tpe match { + tree.tpe match case tpe @ TermRef(prefix, _) => val lft = lifter if (prefix eq NoPrefix) @@ -291,8 +275,6 @@ class LambdaLift extends MiniPhase with IdentityDenotTransformer { thisPhase => else tree case _ => tree - } - } override def transformSelect(tree: Select)(using Context): Tree = val denot = tree.denot 
@@ -312,7 +294,7 @@ class LambdaLift extends MiniPhase with IdentityDenotTransformer { thisPhase => override def transformClosure(tree: Closure)(using Context): Closure = cpy.Closure(tree)(env = lifter.addFreeArgs(tree.meth.symbol, tree.env)) - override def transformDefDef(tree: DefDef)(using Context): Tree = { + override def transformDefDef(tree: DefDef)(using Context): Tree = val sym = tree.symbol val lft = lifter val paramsAdded = @@ -320,21 +302,18 @@ class LambdaLift extends MiniPhase with IdentityDenotTransformer { thisPhase => else tree if (lft.needsLifting(sym)) lft.liftDef(paramsAdded) else paramsAdded - } - override def transformReturn(tree: Return)(using Context): Tree = tree.expr match { + override def transformReturn(tree: Return)(using Context): Tree = tree.expr match case Block(stats, value) => Block(stats, Return(value, tree.from)).withSpan(tree.span) case _ => tree - } - override def transformTemplate(tree: Template)(using Context): Template = { + override def transformTemplate(tree: Template)(using Context): Template = val cls = ctx.owner val lft = lifter val impl = lft.addFreeParams(tree, lft.proxies(cls)).asInstanceOf[Template] cpy.Template(impl)(body = impl.body ++ lft.liftedDefs.remove(cls).get) - } override def transformTypeDef(tree: TypeDef)(using Context): Tree = if (lifter.needsLifting(tree.symbol)) lifter.liftDef(tree) else tree diff --git a/compiler/src/dotty/tools/dotc/transform/LazyVals.scala b/compiler/src/dotty/tools/dotc/transform/LazyVals.scala index b433e37e39c0..6e621d32704b 100644 --- a/compiler/src/dotty/tools/dotc/transform/LazyVals.scala +++ b/compiler/src/dotty/tools/dotc/transform/LazyVals.scala @@ -20,7 +20,7 @@ import transform.SymUtils.* import scala.collection.mutable -class LazyVals extends MiniPhase with IdentityDenotTransformer { +class LazyVals extends MiniPhase with IdentityDenotTransformer: import LazyVals._ import tpd._ @@ -48,20 +48,18 @@ class LazyVals extends MiniPhase with IdentityDenotTransformer { /** A map 
of lazy values to the fields they should null after initialization. */ private var lazyValNullables: IdentityHashMap[Symbol, mutable.ListBuffer[Symbol]] | Null = _ - private def nullableFor(sym: Symbol)(using Context) = { + private def nullableFor(sym: Symbol)(using Context) = // optimisation: value only used once, we can remove the value from the map val nullables = lazyValNullables.nn.remove(sym) if (nullables == null) Nil else nullables.toList - } private def needsBoxing(tp: Type)(using Context): Boolean = tp.classSymbol.isPrimitiveValueClass - override def prepareForUnit(tree: Tree)(using Context): Context = { + override def prepareForUnit(tree: Tree)(using Context): Context = if (lazyValNullables == null) lazyValNullables = ctx.base.collectNullableFieldsPhase.asInstanceOf[CollectNullableFields].lazyValNullables ctx - } override def transformDefDef(tree: DefDef)(using Context): Tree = transformLazyVal(tree) @@ -69,13 +67,13 @@ class LazyVals extends MiniPhase with IdentityDenotTransformer { override def transformValDef(tree: ValDef)(using Context): Tree = transformLazyVal(tree) - def transformLazyVal(tree: ValOrDefDef)(using Context): Tree = { + def transformLazyVal(tree: ValOrDefDef)(using Context): Tree = val sym = tree.symbol if (!sym.is(Lazy) || sym.owner.is(Trait) || // val is accessor, lazy field will be implemented in subclass (sym.isStatic && sym.is(Module, butNot = Method))) // static module vals are implemented in the JVM by lazy loading tree - else { + else val isField = sym.owner.isClass if (isField) if sym.isAllOf(SyntheticModule) @@ -92,48 +90,41 @@ class LazyVals extends MiniPhase with IdentityDenotTransformer { // not work in that case. However, we can check whether the name is an ExpandedName instead. 
transformSyntheticModule(tree) else if (sym.isThreadUnsafe || ctx.settings.scalajs.value) - if (sym.is(Module) && !ctx.settings.scalajs.value) { + if (sym.is(Module) && !ctx.settings.scalajs.value) report.error(em"@threadUnsafe is only supported on lazy vals", sym.srcPos) transformMemberDefThreadSafe(tree) - } else transformMemberDefThreadUnsafe(tree) else transformMemberDefThreadSafe(tree) else transformLocalDef(tree) - } - } /** Append offset fields to companion objects */ - override def transformTemplate(template: Template)(using Context): Tree = { + override def transformTemplate(template: Template)(using Context): Tree = val cls = ctx.owner.asClass - appendOffsetDefs.get(cls) match { + appendOffsetDefs.get(cls) match case None => template case Some(data) => data.defs.foreach(defin => defin.symbol.addAnnotation(Annotation(defn.ScalaStaticAnnot, defin.symbol.span))) cpy.Template(template)(body = addInFront(data.defs, template.body)) - } - } - private def addInFront(prefix: List[Tree], stats: List[Tree]) = stats match { + private def addInFront(prefix: List[Tree], stats: List[Tree]) = stats match case first :: rest if isSuperConstrCall(first) => first :: prefix ::: rest case _ => prefix ::: stats - } /** Make an eager val that would implement synthetic module. * Eager val ensures thread safety and has less code generated. 
* */ - def transformSyntheticModule(tree: ValOrDefDef)(using Context): Thicket = { + def transformSyntheticModule(tree: ValOrDefDef)(using Context): Thicket = val sym = tree.symbol val holderSymbol = newSymbol(sym.owner, LazyLocalName.fresh(sym.asTerm.name), Synthetic, sym.info.widen.resultType).enteredAfter(this) val field = ValDef(holderSymbol, tree.rhs.changeOwnerAfter(sym, holderSymbol, this)) val getter = DefDef(sym.asTerm, ref(holderSymbol)) Thicket(field, getter) - } /** Desugar a local `lazy val x: Int = ` into: * @@ -151,7 +142,7 @@ class LazyVals extends MiniPhase with IdentityDenotTransformer { * def x(): Int = if (x$lzy.initialized()) x$lzy.value() else x$lzycompute() * ``` */ - def transformLocalDef(x: ValOrDefDef)(using Context): Thicket = { + def transformLocalDef(x: ValOrDefDef)(using Context): Thicket = val xname = x.name.asTermName val tpe = x.tpe.widen.resultType.widen @@ -185,19 +176,16 @@ class LazyVals extends MiniPhase with IdentityDenotTransformer { report.debuglog(s"found a lazy val ${x.show},\nrewrote with ${holderTree.show}") Thicket(holderTree, initTree, accessor) - } - override def transformStats(trees: List[tpd.Tree])(using Context): List[Tree] = { + override def transformStats(trees: List[tpd.Tree])(using Context): List[Tree] = // backend requires field usage to be after field definition // need to bring containers to start of method val (holders, stats) = - trees.partition { + trees.partition: _.symbol.flags.&~(Touched) == containerFlags // Filtering out Touched is not required currently, as there are no LazyTypes involved here // but just to be more safe - } holders:::stats - } private def nullOut(nullables: List[Symbol])(using Context): List[Tree] = nullables.map { field => @@ -218,7 +206,7 @@ class LazyVals extends MiniPhase with IdentityDenotTransformer { * } * ``` */ - def mkThreadUnsafeDef(sym: Symbol, flag: Symbol, target: Symbol, rhs: Tree)(using Context): DefDef = { + def mkThreadUnsafeDef(sym: Symbol, flag: Symbol, target: 
Symbol, rhs: Tree)(using Context): DefDef = val targetRef = ref(target) val flagRef = ref(flag) val stats = targetRef.becomes(rhs) :: flagRef.becomes(Literal(Constant(true))) :: nullOut(nullableFor(sym)) @@ -228,7 +216,6 @@ class LazyVals extends MiniPhase with IdentityDenotTransformer { unitLiteral ) DefDef(sym.asTerm, Block(List(init), targetRef.ensureApplied)) - } /** Create thread-unsafe lazy accessor for not-nullable types equivalent to such code * ``` @@ -241,7 +228,7 @@ class LazyVals extends MiniPhase with IdentityDenotTransformer { * } * ``` */ - def mkDefThreadUnsafeNonNullable(sym: Symbol, target: Symbol, rhs: Tree)(using Context): DefDef = { + def mkDefThreadUnsafeNonNullable(sym: Symbol, target: Symbol, rhs: Tree)(using Context): DefDef = val targetRef = ref(target) val stats = targetRef.becomes(rhs) :: nullOut(nullableFor(sym)) val init = If( @@ -250,9 +237,8 @@ class LazyVals extends MiniPhase with IdentityDenotTransformer { unitLiteral ) DefDef(sym.asTerm, Block(List(init), targetRef.ensureApplied)) - } - def transformMemberDefThreadUnsafe(x: ValOrDefDef)(using Context): Thicket = { + def transformMemberDefThreadUnsafe(x: ValOrDefDef)(using Context): Thicket = val claz = x.symbol.owner.asClass val tpe = x.tpe.widen.resultType.widen assert(!(x.symbol is Mutable)) @@ -266,13 +252,11 @@ class LazyVals extends MiniPhase with IdentityDenotTransformer { if (x.tpe.isNotNull && tpe <:< defn.ObjectType) // can use 'null' value instead of flag Thicket(containerTree, mkDefThreadUnsafeNonNullable(x.symbol, containerSymbol, x.rhs)) - else { + else val flagName = LazyBitMapName.fresh(x.name.asTermName) val flagSymbol = newSymbol(x.symbol.owner, flagName, containerFlags | Private, defn.BooleanType).enteredAfter(this) val flag = ValDef(flagSymbol, Literal(Constant(false))) Thicket(containerTree, flag, mkThreadUnsafeDef(x.symbol, flagSymbol, containerSymbol, x.rhs)) - } - } /** * Create a threadsafe lazy accessor and function that computes the field's value. 
`Evaluating` and @@ -335,7 +319,7 @@ class LazyVals extends MiniPhase with IdentityDenotTransformer { claz: ClassSymbol, target: Symbol, offset: Tree, - thiz: Tree)(using Context): (DefDef, DefDef) = { + thiz: Tree)(using Context): (DefDef, DefDef) = val tp = memberDef.tpe.widenDealias.resultType.widenDealias val waiting = ref(defn.LazyValsWaitingState) val controlState = ref(defn.LazyValsControlState) @@ -375,17 +359,16 @@ class LazyVals extends MiniPhase with IdentityDenotTransformer { val accessorDef = DefDef(accessorMethodSymbol, accessorBody) // if observed a null (uninitialized) value - val initialize = { + val initialize = // var result: AnyRef val resSymbNullable = newSymbol(lazyInitMethodSymbol, lazyNme.resultNullable, Synthetic | Mutable, defn.ObjectType) val resSymb = newSymbol(lazyInitMethodSymbol, lazyNme.result, Synthetic | Mutable, defn.ObjectType) // releasing block in finally - val lockRel = { + val lockRel = val lockSymb = newSymbol(lazyInitMethodSymbol, lazyNme.lock, Synthetic, waiting.typeOpt) Block(ValDef(lockSymb, ref(target).cast(waiting.typeOpt)) :: objCasFlag.appliedTo(thiz, offset, ref(lockSymb), ref(resSymb)) :: Nil, ref(lockSymb).select(lazyNme.RLazyVals.waitingRelease).ensureApplied) - } // finally block val fin = If( objCasFlag.appliedTo(thiz, offset, evaluating, ref(resSymb)).select(nme.UNARY_!).appliedToNone, @@ -414,7 +397,6 @@ class LazyVals extends MiniPhase with IdentityDenotTransformer { Return(ref(resSymbNullable), lazyInitMethodSymbol)), unitLiteral ).withType(defn.UnitType) - } val current = newSymbol(lazyInitMethodSymbol, lazyNme.current, Synthetic, defn.ObjectType) val ifNotUninitialized = @@ -444,17 +426,15 @@ class LazyVals extends MiniPhase with IdentityDenotTransformer { val initMainLoop = WhileDo(EmptyTree, initBody) // becomes: while (true) do { body } val initMethodDef = DefDef(lazyInitMethodSymbol, initMainLoop) (accessorDef, initMethodDef) - } - def transformMemberDefThreadSafe(x: ValOrDefDef)(using Context): 
Thicket = { + def transformMemberDefThreadSafe(x: ValOrDefDef)(using Context): Thicket = assert(!(x.symbol is Mutable)) if ctx.settings.YlegacyLazyVals.value then transformMemberDefThreadSafeLegacy(x) else transformMemberDefThreadSafeNew(x) - } - def transformMemberDefThreadSafeNew(x: ValOrDefDef)(using Context): Thicket = { + def transformMemberDefThreadSafeNew(x: ValOrDefDef)(using Context): Thicket = import dotty.tools.dotc.core.Types._ import dotty.tools.dotc.core.Flags._ @@ -489,7 +469,6 @@ class LazyVals extends MiniPhase with IdentityDenotTransformer { val (accessorDef, initMethodDef) = mkThreadSafeDef(x, claz, containerSymbol, offset, swapOver) Thicket(containerTree, accessorDef, initMethodDef) - } /** Create a threadsafe lazy accessor equivalent to such code * ``` @@ -533,7 +512,7 @@ class LazyVals extends MiniPhase with IdentityDenotTransformer { stateMask: Tree, casFlag: Tree, setFlagState: Tree, - waitOnLock: Tree)(using Context): DefDef = { + waitOnLock: Tree)(using Context): DefDef = val initState = Literal(Constant(0)) val computeState = Literal(Constant(1)) val computedState = Literal(Constant(3)) @@ -549,7 +528,7 @@ class LazyVals extends MiniPhase with IdentityDenotTransformer { val stateDef = ValDef(stateSymbol, stateMask.appliedTo(ref(flagSymbol), Literal(Constant(ord)))) val stateRef = ref(stateSymbol) - val compute = { + val compute = val resultSymbol = newSymbol(methodSymbol, lazyNme.result, Synthetic, tp) val resultRef = ref(resultSymbol) val stats = ( @@ -559,9 +538,8 @@ class LazyVals extends MiniPhase with IdentityDenotTransformer { setFlagState.appliedTo(thiz, offset, computedState, fieldId)) ) Block(stats, Return(resultRef, methodSymbol)) - } - val retryCase = { + val retryCase = val caseSymbol = newSymbol(methodSymbol, nme.DEFAULT_EXCEPTION_NAME, SyntheticCase, defn.ThrowableType) val triggerRetry = setFlagState.appliedTo(thiz, offset, initState, fieldId) CaseDef( @@ -569,7 +547,6 @@ class LazyVals extends MiniPhase with 
IdentityDenotTransformer { EmptyTree, Block(List(triggerRetry), Throw(ref(caseSymbol))) ) - } val initialize = If( casFlag.appliedTo(thiz, offset, flagRef, computeState, fieldId), @@ -589,9 +566,8 @@ class LazyVals extends MiniPhase with IdentityDenotTransformer { val loop = WhileDo(EmptyTree, Block(List(flagDef, stateDef), condition)) DefDef(methodSymbol, loop) - } - def transformMemberDefThreadSafeLegacy(x: ValOrDefDef)(using Context): Thicket = { + def transformMemberDefThreadSafeLegacy(x: ValOrDefDef)(using Context): Thicket = val tpe = x.tpe.widen.resultType.widen val claz = x.symbol.owner.asClass val thizClass = Literal(Constant(claz.info)) @@ -604,7 +580,7 @@ class LazyVals extends MiniPhase with IdentityDenotTransformer { def offsetName(id: Int) = s"${StdNames.nme.LAZY_FIELD_OFFSET}${if (x.symbol.owner.is(Module)) "_m_" else ""}$id".toTermName // compute or create appropriate offsetSymbol, bitmap and bits used by current ValDef - appendOffsetDefs.get(claz) match { + appendOffsetDefs.get(claz) match case Some(info) => val flagsPerLong = (64 / scala.runtime.LazyVals.BITS_PER_LAZY_VAL).toInt info.ord += 1 @@ -615,7 +591,7 @@ class LazyVals extends MiniPhase with IdentityDenotTransformer { offsetSymbol = claz.info.decl(offsetById) .suchThat(sym => sym.is(Synthetic) && sym.isTerm) .symbol.asTerm - else { // need to create a new flag + else // need to create a new flag offsetSymbol = newSymbol(claz, offsetById, Synthetic, defn.LongType).enteredAfter(this) offsetSymbol.nn.addAnnotation(Annotation(defn.ScalaStaticAnnot, offsetSymbol.nn.span)) val flagName = LazyBitMapName.fresh(id.toString.toTermName) @@ -624,7 +600,6 @@ class LazyVals extends MiniPhase with IdentityDenotTransformer { val fieldTree = thizClass.select(lazyNme.RLazyVals.getDeclaredField).appliedTo(Literal(Constant(flagName.toString))) val offsetTree = ValDef(offsetSymbol.nn, getOffsetStatic.appliedTo(fieldTree)) info.defs = offsetTree :: info.defs - } case None => offsetSymbol = newSymbol(claz, 
offsetName(0), Synthetic, defn.LongType).enteredAfter(this) @@ -635,7 +610,6 @@ class LazyVals extends MiniPhase with IdentityDenotTransformer { val fieldTree = thizClass.select(lazyNme.RLazyVals.getDeclaredField).appliedTo(Literal(Constant(flagName.toString))) val offsetTree = ValDef(offsetSymbol.nn, getOffsetStatic.appliedTo(fieldTree)) appendOffsetDefs += (claz -> new OffsetInfo(List(offsetTree), ord)) - } val containerName = LazyLocalName.fresh(x.name.asTermName) val containerSymbol = newSymbol(claz, containerName, x.symbol.flags &~ containerFlagsMask | containerFlags, tpe, coord = x.symbol.coord).enteredAfter(this) @@ -652,15 +626,13 @@ class LazyVals extends MiniPhase with IdentityDenotTransformer { if (flag eq EmptyTree) Thicket(containerTree, accessor) else Thicket(containerTree, flag, accessor) - } -} -object LazyVals { +object LazyVals: val name: String = "lazyVals" val description: String = "expand lazy vals" - object lazyNme { + object lazyNme: import Names.TermName - object RLazyVals { + object RLazyVals: import scala.runtime.LazyVals.{Names => N} val waitingAwaitRelease: TermName = "await".toTermName val waitingRelease: TermName = "countDown".toTermName @@ -675,7 +647,6 @@ object LazyVals { val getOffset: TermName = N.getOffset.toTermName val getOffsetStatic: TermName = "getOffsetStatic".toTermName val getDeclaredField: TermName = "getDeclaredField".toTermName - } val flag: TermName = "flag".toTermName val state: TermName = "state".toTermName val result: TermName = "result".toTermName @@ -687,5 +658,3 @@ object LazyVals { val current: TermName = "current".toTermName val lock: TermName = "lock".toTermName val discard: TermName = "discard".toTermName - } -} diff --git a/compiler/src/dotty/tools/dotc/transform/LiftTry.scala b/compiler/src/dotty/tools/dotc/transform/LiftTry.scala index 6acb1013d509..0d3244848b00 100644 --- a/compiler/src/dotty/tools/dotc/transform/LiftTry.scala +++ b/compiler/src/dotty/tools/dotc/transform/LiftTry.scala @@ -73,14 +73,13 
@@ class LiftTry extends MiniPhase with IdentityDenotTransformer { thisPhase => liftingCtx(false) override def transformTry(tree: Try)(using Context): Tree = - if (needLift && tree.cases.nonEmpty) { + if (needLift && tree.cases.nonEmpty) report.debuglog(i"lifting tree at ${tree.span}, current owner = ${ctx.owner}") val fn = newSymbol( ctx.owner, LiftedTreeName.fresh(), Synthetic | Method, MethodType(Nil, tree.tpe.widenIfUnstable), coord = tree.span) tree.changeOwnerAfter(ctx.owner, fn, thisPhase) Block(DefDef(fn, tree) :: Nil, ref(fn).appliedToNone) - } else tree } object LiftTry: diff --git a/compiler/src/dotty/tools/dotc/transform/MacroTransform.scala b/compiler/src/dotty/tools/dotc/transform/MacroTransform.scala index 7bb7ed365ebe..db7d68fe1a97 100644 --- a/compiler/src/dotty/tools/dotc/transform/MacroTransform.scala +++ b/compiler/src/dotty/tools/dotc/transform/MacroTransform.scala @@ -9,14 +9,13 @@ import Contexts._ /** A base class for transforms. * A transform contains a compiler phase which applies a tree transformer. 
*/ -abstract class MacroTransform extends Phase { +abstract class MacroTransform extends Phase: import ast.tpd._ - override def run(using Context): Unit = { + override def run(using Context): Unit = val unit = ctx.compilationUnit unit.tpdTree = atPhase(transformPhase)(newTransformer.transform(unit.tpdTree)) - } protected def newTransformer(using Context): Transformer @@ -33,7 +32,7 @@ abstract class MacroTransform extends Phase { override def transform(tree: Tree)(using Context): Tree = try - tree match { + tree match case EmptyValDef => tree case _: PackageDef | _: MemberDef => @@ -47,14 +46,11 @@ abstract class MacroTransform extends Phase { transformStats(impl.body, tree.symbol)) case _ => super.transform(tree) - } - catch { + catch case ex: TypeError => report.error(ex, tree.srcPos) tree - } def transformSelf(vd: ValDef)(using Context): ValDef = cpy.ValDef(vd)(tpt = transform(vd.tpt)) end Transformer -} diff --git a/compiler/src/dotty/tools/dotc/transform/MegaPhase.scala b/compiler/src/dotty/tools/dotc/transform/MegaPhase.scala index b4e8c3acbc5c..b4bbff31dfce 100644 --- a/compiler/src/dotty/tools/dotc/transform/MegaPhase.scala +++ b/compiler/src/dotty/tools/dotc/transform/MegaPhase.scala @@ -13,7 +13,7 @@ import staging.StagingLevel.* * This is an evolution of the previous "TreeTransformers.scala", which was written by @DarkDimius and * is described in his thesis. */ -object MegaPhase { +object MegaPhase: import ast.tpd._ /** The base class of tree transforms. For each kind of tree K, there are @@ -29,7 +29,7 @@ object MegaPhase { * - Other: to prepape/transform a tree that does not have a specific prepare/transform * method pair. 
*/ - abstract class MiniPhase extends Phase { + abstract class MiniPhase extends Phase: private[MegaPhase] var superPhase: MegaPhase = _ private[MegaPhase] var idxInGroup: Int = _ @@ -134,11 +134,9 @@ object MegaPhase { override def run(using Context): Unit = singletonGroup.run - } -} import MegaPhase._ -class MegaPhase(val miniPhases: Array[MiniPhase]) extends Phase { +class MegaPhase(val miniPhases: Array[MiniPhase]) extends Phase: import ast.tpd._ override val phaseName: String = @@ -148,21 +146,19 @@ class MegaPhase(val miniPhases: Array[MiniPhase]) extends Phase { private var relaxedTypingCache: Boolean = _ private var relaxedTypingKnown = false - override final def relaxedTyping: Boolean = { - if (!relaxedTypingKnown) { + override final def relaxedTyping: Boolean = + if (!relaxedTypingKnown) relaxedTypingCache = miniPhases.exists(_.relaxedTypingInGroup) relaxedTypingKnown = true - } relaxedTypingCache - } private val cpy: TypedTreeCopier = cpyBetweenPhases /** Transform node using all phases in this group that have idxInGroup >= start */ - def transformNode(tree: Tree, start: Int)(using Context): Tree = { + def transformNode(tree: Tree, start: Int)(using Context): Tree = def goNamed(tree: Tree, start: Int) = try - tree match { + tree match case tree: Ident => goIdent(tree, start) case tree: Select => goSelect(tree, start) case tree: ValDef => goValDef(tree, start) @@ -171,15 +167,13 @@ class MegaPhase(val miniPhases: Array[MiniPhase]) extends Phase { case tree: Labeled => goLabeled(tree, start) case tree: Bind => goBind(tree, start) case _ => goOther(tree, start) - } - catch { + catch case ex: TypeError => report.error(ex, tree.srcPos) tree - } def goUnnamed(tree: Tree, start: Int) = try - tree match { + tree match case tree: Apply => goApply(tree, start) case tree: TypeTree => goTypeTree(tree, start) case tree: Thicket => @@ -206,244 +200,201 @@ class MegaPhase(val miniPhases: Array[MiniPhase]) extends Phase { case tree: WhileDo => goWhileDo(tree, start) 
case tree: Alternative => goAlternative(tree, start) case tree => goOther(tree, start) - } - catch { + catch case ex: TypeError => report.error(ex, tree.srcPos) tree - } if (tree.isInstanceOf[NameTree]) goNamed(tree, start) else goUnnamed(tree, start) - } /** Transform full tree using all phases in this group that have idxInGroup >= start */ - def transformTree(tree: Tree, start: Int)(using Context): Tree = { + def transformTree(tree: Tree, start: Int)(using Context): Tree = inline def inLocalContext[T](inline op: Context ?=> T)(using Context): T = val sym = tree.symbol runWithOwner(if (sym.is(PackageVal)) sym.moduleClass else sym)(op) - def transformNamed(tree: Tree, start: Int, outerCtx: Context): Tree = tree match { + def transformNamed(tree: Tree, start: Int, outerCtx: Context): Tree = tree match case tree: Ident => - inContext(prepIdent(tree, start)(using outerCtx)) { + inContext(prepIdent(tree, start)(using outerCtx)): goIdent(tree, start) - } case tree: Select => - inContext(prepSelect(tree, start)(using outerCtx)) { + inContext(prepSelect(tree, start)(using outerCtx)): val qual = transformTree(tree.qualifier, start) goSelect(cpy.Select(tree)(qual, tree.name), start) - } case tree: ValDef => - inContext(prepValDef(tree, start)(using outerCtx)) { - def mapValDef(using Context) = { + inContext(prepValDef(tree, start)(using outerCtx)): + def mapValDef(using Context) = val tpt = transformTree(tree.tpt, start) val rhs = transformTree(tree.rhs, start) cpy.ValDef(tree)(tree.name, tpt, rhs) - } if tree.isEmpty then tree else goValDef( if tree.symbol.exists then inLocalContext(mapValDef) else mapValDef, start) - } case tree: DefDef => - inContext(prepDefDef(tree, start)(using outerCtx)) { - def mapDefDef(using Context) = { + inContext(prepDefDef(tree, start)(using outerCtx)): + def mapDefDef(using Context) = val paramss = tree.paramss.mapConserve(transformSpecificTrees(_, start)) .asInstanceOf[List[ParamClause]] val tpt = transformTree(tree.tpt, start) val rhs = 
transformTree(tree.rhs, start) cpy.DefDef(tree)(tree.name, paramss, tpt, rhs) - } goDefDef(inLocalContext(mapDefDef), start) - } case tree: TypeDef => - inContext(prepTypeDef(tree, start)(using outerCtx)) { + inContext(prepTypeDef(tree, start)(using outerCtx)): val rhs = inLocalContext(transformTree(tree.rhs, start)) goTypeDef(cpy.TypeDef(tree)(tree.name, rhs), start) - } case tree: Labeled => - inContext(prepLabeled(tree, start)(using outerCtx)) { + inContext(prepLabeled(tree, start)(using outerCtx)): val bind = transformTree(tree.bind, start).asInstanceOf[Bind] val expr = transformTree(tree.expr, start) goLabeled(cpy.Labeled(tree)(bind, expr), start) - } case tree: Bind => - inContext(prepBind(tree, start)(using outerCtx)) { + inContext(prepBind(tree, start)(using outerCtx)): val body = transformTree(tree.body, start) goBind(cpy.Bind(tree)(tree.name, body), start) - } case _ => - inContext(prepOther(tree, start)(using outerCtx)) { + inContext(prepOther(tree, start)(using outerCtx)): goOther(tree, start) - } - } - def transformUnnamed(tree: Tree, start: Int, outerCtx: Context): Tree = tree match { + def transformUnnamed(tree: Tree, start: Int, outerCtx: Context): Tree = tree match case tree: Apply => - inContext(prepApply(tree, start)(using outerCtx)) { + inContext(prepApply(tree, start)(using outerCtx)): val fun = transformTree(tree.fun, start) val args = transformTrees(tree.args, start) goApply(cpy.Apply(tree)(fun, args), start) - } case tree: TypeTree => - inContext(prepTypeTree(tree, start)(using outerCtx)) { + inContext(prepTypeTree(tree, start)(using outerCtx)): goTypeTree(tree, start) - } case tree: Thicket => cpy.Thicket(tree)(transformTrees(tree.trees, start)) case tree: This => - inContext(prepThis(tree, start)(using outerCtx)) { + inContext(prepThis(tree, start)(using outerCtx)): goThis(tree, start) - } case tree: Literal => - inContext(prepLiteral(tree, start)(using outerCtx)) { + inContext(prepLiteral(tree, start)(using outerCtx)): goLiteral(tree, 
start) - } case tree: Block => - inContext(prepBlock(tree, start)(using outerCtx)) { + inContext(prepBlock(tree, start)(using outerCtx)): transformBlock(tree, start) - } case tree: TypeApply => - inContext(prepTypeApply(tree, start)(using outerCtx)) { + inContext(prepTypeApply(tree, start)(using outerCtx)): val fun = transformTree(tree.fun, start) val args = transformTrees(tree.args, start) goTypeApply(cpy.TypeApply(tree)(fun, args), start) - } case tree: If => - inContext(prepIf(tree, start)(using outerCtx)) { + inContext(prepIf(tree, start)(using outerCtx)): val cond = transformTree(tree.cond, start) val thenp = transformTree(tree.thenp, start) val elsep = transformTree(tree.elsep, start) goIf(cpy.If(tree)(cond, thenp, elsep), start) - } case tree: New => - inContext(prepNew(tree, start)(using outerCtx)) { + inContext(prepNew(tree, start)(using outerCtx)): val tpt = transformTree(tree.tpt, start) goNew(cpy.New(tree)(tpt), start) - } case tree: Typed => - inContext(prepTyped(tree, start)(using outerCtx)) { + inContext(prepTyped(tree, start)(using outerCtx)): val expr = transformTree(tree.expr, start) val tpt = transformTree(tree.tpt, start) goTyped(cpy.Typed(tree)(expr, tpt), start) - } case tree: CaseDef => - inContext(prepCaseDef(tree, start)(using outerCtx)) { + inContext(prepCaseDef(tree, start)(using outerCtx)): val pat = withMode(Mode.Pattern)(transformTree(tree.pat, start)) val guard = transformTree(tree.guard, start) val body = transformTree(tree.body, start) goCaseDef(cpy.CaseDef(tree)(pat, guard, body), start) - } case tree: Closure => - inContext(prepClosure(tree, start)(using outerCtx)) { + inContext(prepClosure(tree, start)(using outerCtx)): val env = transformTrees(tree.env, start) val meth = transformTree(tree.meth, start) val tpt = transformTree(tree.tpt, start) goClosure(cpy.Closure(tree)(env, meth, tpt), start) - } case tree: Assign => - inContext(prepAssign(tree, start)(using outerCtx)) { + inContext(prepAssign(tree, start)(using outerCtx)): val 
lhs = transformTree(tree.lhs, start) val rhs = transformTree(tree.rhs, start) goAssign(cpy.Assign(tree)(lhs, rhs), start) - } case tree: SeqLiteral => - inContext(prepSeqLiteral(tree, start)(using outerCtx)) { + inContext(prepSeqLiteral(tree, start)(using outerCtx)): val elems = transformTrees(tree.elems, start) val elemtpt = transformTree(tree.elemtpt, start) goSeqLiteral(cpy.SeqLiteral(tree)(elems, elemtpt), start) - } case tree: Super => - inContext(prepSuper(tree, start)(using outerCtx)) { + inContext(prepSuper(tree, start)(using outerCtx)): goSuper(tree, start) - } case tree: Template => - inContext(prepTemplate(tree, start)(using outerCtx)) { + inContext(prepTemplate(tree, start)(using outerCtx)): val constr = transformSpecificTree(tree.constr, start) val parents = transformTrees(tree.parents, start)(using ctx.superCallContext) val self = transformSpecificTree(tree.self, start) val body = transformStats(tree.body, tree.symbol, start) goTemplate(cpy.Template(tree)(constr, parents, Nil, self, body), start) - } case tree: Match => - inContext(prepMatch(tree, start)(using outerCtx)) { + inContext(prepMatch(tree, start)(using outerCtx)): val selector = transformTree(tree.selector, start) val cases = transformSpecificTrees(tree.cases, start) goMatch(cpy.Match(tree)(selector, cases), start) - } case tree: UnApply => - inContext(prepUnApply(tree, start)(using outerCtx)) { + inContext(prepUnApply(tree, start)(using outerCtx)): val fun = transformTree(tree.fun, start) val implicits = transformTrees(tree.implicits, start) val patterns = transformTrees(tree.patterns, start) goUnApply(cpy.UnApply(tree)(fun, implicits, patterns), start) - } case tree: PackageDef => - inContext(prepPackageDef(tree, start)(using outerCtx)) { - def mapPackage(using Context) = { + inContext(prepPackageDef(tree, start)(using outerCtx)): + def mapPackage(using Context) = val pid = transformSpecificTree(tree.pid, start) val stats = transformStats(tree.stats, tree.symbol, start) 
cpy.PackageDef(tree)(pid, stats) - } goPackageDef(inLocalContext(mapPackage), start) - } case tree: Try => - inContext(prepTry(tree, start)(using outerCtx)) { + inContext(prepTry(tree, start)(using outerCtx)): val expr = transformTree(tree.expr, start) val cases = transformSpecificTrees(tree.cases, start) val finalizer = transformTree(tree.finalizer, start) goTry(cpy.Try(tree)(expr, cases, finalizer), start) - } case tree: Inlined => - inContext(prepInlined(tree, start)(using outerCtx)) { + inContext(prepInlined(tree, start)(using outerCtx)): val bindings = transformSpecificTrees(tree.bindings, start) val expansion = transformTree(tree.expansion, start)(using inlineContext(tree.call)) goInlined(cpy.Inlined(tree)(tree.call, bindings, expansion), start) - } case tree: Quote => - inContext(prepQuote(tree, start)(using outerCtx)) { + inContext(prepQuote(tree, start)(using outerCtx)): val body = transformTree(tree.body, start)(using quoteContext) goQuote(cpy.Quote(tree)(body, Nil), start) - } case tree: Splice => - inContext(prepSplice(tree, start)(using outerCtx)) { + inContext(prepSplice(tree, start)(using outerCtx)): val expr = transformTree(tree.expr, start)(using spliceContext) goSplice(cpy.Splice(tree)(expr), start) - } case tree: Return => - inContext(prepReturn(tree, start)(using outerCtx)) { + inContext(prepReturn(tree, start)(using outerCtx)): val expr = transformTree(tree.expr, start) goReturn(cpy.Return(tree)(expr, tree.from), start) // don't transform `tree.from`, as this is not a normal ident, but // a pointer to the enclosing method. 
- } case tree: WhileDo => - inContext(prepWhileDo(tree, start)(using outerCtx)) { + inContext(prepWhileDo(tree, start)(using outerCtx)): val cond = transformTree(tree.cond, start) val body = transformTree(tree.body, start) goWhileDo(cpy.WhileDo(tree)(cond, body), start) - } case tree: Alternative => - inContext(prepAlternative(tree, start)(using outerCtx)) { + inContext(prepAlternative(tree, start)(using outerCtx)): val trees = transformTrees(tree.trees, start) goAlternative(cpy.Alternative(tree)(trees), start) - } case tree => - inContext(prepOther(tree, start)(using outerCtx)) { + inContext(prepOther(tree, start)(using outerCtx)): goOther(tree, start) - } - } // try - if (tree.source != ctx.source && tree.source.exists) + if (tree.source != ctx.source && tree.source.exists) transformTree(tree, start)(using ctx.withSource(tree.source)) - else if (tree.isInstanceOf[NameTree]) + else if (tree.isInstanceOf[NameTree]) transformNamed(tree, start, ctx) - else + else transformUnnamed(tree, start, ctx) // catch case ex: AssertionError => // println(i"error while transforming $tree") // throw ex - } def transformSpecificTree[T <: Tree](tree: T, start: Int)(using Context): T = transformTree(tree, start).asInstanceOf[T] @@ -464,11 +415,10 @@ class MegaPhase(val miniPhases: Array[MiniPhase]) extends Phase { })(using nestedCtx) goBlock(block1, start) - def transformUnit(tree: Tree)(using Context): Tree = { + def transformUnit(tree: Tree)(using Context): Tree = val nestedCtx = prepUnit(tree, 0) val tree1 = transformTree(tree, 0)(using nestedCtx) goUnit(tree1, 0)(using nestedCtx) - } def transformTrees(trees: List[Tree], start: Int)(using Context): List[Tree] = trees.flattenedMapConserve(transformTree(_, start)) @@ -488,36 +438,30 @@ class MegaPhase(val miniPhases: Array[MiniPhase]) extends Phase { /** Does `phase` contain a redefinition of method `name`? 
* (which is a method of MiniPhase) */ - private def defines(phase: MiniPhase, name: String) = { + private def defines(phase: MiniPhase, name: String) = def hasRedefinedMethod(cls: Class[?]): Boolean = if (cls.eq(classOf[MiniPhase])) false - else { + else var clsMethods = clsMethodsCache.get(cls) - if (clsMethods == null) { + if (clsMethods == null) clsMethods = cls.getDeclaredMethods clsMethodsCache.put(cls, clsMethods) - } clsMethods.nn.exists(_.nn.getName == name) || hasRedefinedMethod(cls.getSuperclass.nn) - } hasRedefinedMethod(phase.getClass) - } private def newNxArray = new Array[MiniPhase | Null](miniPhases.length + 1) private val emptyNxArray = newNxArray - private def init(methName: String): Array[MiniPhase | Null] = { + private def init(methName: String): Array[MiniPhase | Null] = var nx: Array[MiniPhase | Null] = emptyNxArray - for (idx <- miniPhases.length - 1 to 0 by -1) { + for (idx <- miniPhases.length - 1 to 0 by -1) val subPhase = miniPhases(idx) - if (defines(subPhase, methName)) { + if (defines(subPhase, methName)) if (nx eq emptyNxArray) nx = newNxArray nx(idx) = subPhase - } else if (nx ne emptyNxArray) nx(idx) = nx(idx + 1) - } nx - } private val nxIdentPrepPhase = init("prepareForIdent") private val nxIdentTransPhase = init("transformIdent") @@ -590,526 +534,422 @@ class MegaPhase(val miniPhases: Array[MiniPhase]) extends Phase { private val nxOtherPrepPhase = init("prepareForOther") private val nxOtherTransPhase = init("transformOther") - for ((phase, idx) <- miniPhases.zipWithIndex) { + for ((phase, idx) <- miniPhases.zipWithIndex) phase.superPhase = this phase.idxInGroup = idx - } // Boilerplate snippets - def prepIdent(tree: Ident, start: Int)(using Context): Context = { + def prepIdent(tree: Ident, start: Int)(using Context): Context = val phase = nxIdentPrepPhase(start) if (phase == null) ctx else prepIdent(tree, phase.idxInGroup + 1)(using phase.prepareForIdent(tree)) - } - def goIdent(tree: Ident, start: Int)(using Context): Tree = { 
+ def goIdent(tree: Ident, start: Int)(using Context): Tree = val phase = nxIdentTransPhase(start) if (phase == null) tree - else phase.transformIdent(tree) match { + else phase.transformIdent(tree) match case tree1: Ident => goIdent(tree1, phase.idxInGroup + 1) case tree1 => transformNode(tree1, phase.idxInGroup + 1) - } - } - def prepSelect(tree: Select, start: Int)(using Context): Context = { + def prepSelect(tree: Select, start: Int)(using Context): Context = val phase = nxSelectPrepPhase(start) if (phase == null) ctx else prepSelect(tree, phase.idxInGroup + 1)(using phase.prepareForSelect(tree)) - } - def goSelect(tree: Select, start: Int)(using Context): Tree = { + def goSelect(tree: Select, start: Int)(using Context): Tree = val phase = nxSelectTransPhase(start) if (phase == null) tree - else phase.transformSelect(tree) match { + else phase.transformSelect(tree) match case tree1: Select => goSelect(tree1, phase.idxInGroup + 1) case tree1 => transformNode(tree1, phase.idxInGroup + 1) - } - } - def prepThis(tree: This, start: Int)(using Context): Context = { + def prepThis(tree: This, start: Int)(using Context): Context = val phase = nxThisPrepPhase(start) if (phase == null) ctx else prepThis(tree, phase.idxInGroup + 1)(using phase.prepareForThis(tree)) - } - def goThis(tree: This, start: Int)(using Context): Tree = { + def goThis(tree: This, start: Int)(using Context): Tree = val phase = nxThisTransPhase(start) if (phase == null) tree - else phase.transformThis(tree) match { + else phase.transformThis(tree) match case tree1: This => goThis(tree1, phase.idxInGroup + 1) case tree1 => transformNode(tree1, phase.idxInGroup + 1) - } - } - def prepSuper(tree: Super, start: Int)(using Context): Context = { + def prepSuper(tree: Super, start: Int)(using Context): Context = val phase = nxSuperPrepPhase(start) if (phase == null) ctx else prepSuper(tree, phase.idxInGroup + 1)(using phase.prepareForSuper(tree)) - } - def goSuper(tree: Super, start: Int)(using Context): 
Tree = { + def goSuper(tree: Super, start: Int)(using Context): Tree = val phase = nxSuperTransPhase(start) if (phase == null) tree - else phase.transformSuper(tree) match { + else phase.transformSuper(tree) match case tree1: Super => goSuper(tree1, phase.idxInGroup + 1) case tree1 => transformNode(tree1, phase.idxInGroup + 1) - } - } - def prepApply(tree: Apply, start: Int)(using Context): Context = { + def prepApply(tree: Apply, start: Int)(using Context): Context = val phase = nxApplyPrepPhase(start) if (phase == null) ctx else prepApply(tree, phase.idxInGroup + 1)(using phase.prepareForApply(tree)) - } - def goApply(tree: Apply, start: Int)(using Context): Tree = { + def goApply(tree: Apply, start: Int)(using Context): Tree = val phase = nxApplyTransPhase(start) if (phase == null) tree - else phase.transformApply(tree) match { + else phase.transformApply(tree) match case tree1: Apply => goApply(tree1, phase.idxInGroup + 1) case tree1 => transformNode(tree1, phase.idxInGroup + 1) - } - } - def prepTypeApply(tree: TypeApply, start: Int)(using Context): Context = { + def prepTypeApply(tree: TypeApply, start: Int)(using Context): Context = val phase = nxTypeApplyPrepPhase(start) if (phase == null) ctx else prepTypeApply(tree, phase.idxInGroup + 1)(using phase.prepareForTypeApply(tree)) - } - def goTypeApply(tree: TypeApply, start: Int)(using Context): Tree = { + def goTypeApply(tree: TypeApply, start: Int)(using Context): Tree = val phase = nxTypeApplyTransPhase(start) if (phase == null) tree - else phase.transformTypeApply(tree) match { + else phase.transformTypeApply(tree) match case tree1: TypeApply => goTypeApply(tree1, phase.idxInGroup + 1) case tree1 => transformNode(tree1, phase.idxInGroup + 1) - } - } - def prepLiteral(tree: Literal, start: Int)(using Context): Context = { + def prepLiteral(tree: Literal, start: Int)(using Context): Context = val phase = nxLiteralPrepPhase(start) if (phase == null) ctx else prepLiteral(tree, phase.idxInGroup + 1)(using 
phase.prepareForLiteral(tree)) - } - def goLiteral(tree: Literal, start: Int)(using Context): Tree = { + def goLiteral(tree: Literal, start: Int)(using Context): Tree = val phase = nxLiteralTransPhase(start) if (phase == null) tree - else phase.transformLiteral(tree) match { + else phase.transformLiteral(tree) match case tree1: Literal => goLiteral(tree1, phase.idxInGroup + 1) case tree1 => transformNode(tree1, phase.idxInGroup + 1) - } - } - def prepNew(tree: New, start: Int)(using Context): Context = { + def prepNew(tree: New, start: Int)(using Context): Context = val phase = nxNewPrepPhase(start) if (phase == null) ctx else prepNew(tree, phase.idxInGroup + 1)(using phase.prepareForNew(tree)) - } - def goNew(tree: New, start: Int)(using Context): Tree = { + def goNew(tree: New, start: Int)(using Context): Tree = val phase = nxNewTransPhase(start) if (phase == null) tree - else phase.transformNew(tree) match { + else phase.transformNew(tree) match case tree1: New => goNew(tree1, phase.idxInGroup + 1) case tree1 => transformNode(tree1, phase.idxInGroup + 1) - } - } - def prepTyped(tree: Typed, start: Int)(using Context): Context = { + def prepTyped(tree: Typed, start: Int)(using Context): Context = val phase = nxTypedPrepPhase(start) if (phase == null) ctx else prepTyped(tree, phase.idxInGroup + 1)(using phase.prepareForTyped(tree)) - } - def goTyped(tree: Typed, start: Int)(using Context): Tree = { + def goTyped(tree: Typed, start: Int)(using Context): Tree = val phase = nxTypedTransPhase(start) if (phase == null) tree - else phase.transformTyped(tree) match { + else phase.transformTyped(tree) match case tree1: Typed => goTyped(tree1, phase.idxInGroup + 1) case tree1 => transformNode(tree1, phase.idxInGroup + 1) - } - } - def prepAssign(tree: Assign, start: Int)(using Context): Context = { + def prepAssign(tree: Assign, start: Int)(using Context): Context = val phase = nxAssignPrepPhase(start) if (phase == null) ctx else prepAssign(tree, phase.idxInGroup + 
1)(using phase.prepareForAssign(tree)) - } - def goAssign(tree: Assign, start: Int)(using Context): Tree = { + def goAssign(tree: Assign, start: Int)(using Context): Tree = val phase = nxAssignTransPhase(start) if (phase == null) tree - else phase.transformAssign(tree) match { + else phase.transformAssign(tree) match case tree1: Assign => goAssign(tree1, phase.idxInGroup + 1) case tree1 => transformNode(tree1, phase.idxInGroup + 1) - } - } - def prepBlock(tree: Block, start: Int)(using Context): Context = { + def prepBlock(tree: Block, start: Int)(using Context): Context = val phase = nxBlockPrepPhase(start) if (phase == null) ctx else prepBlock(tree, phase.idxInGroup + 1)(using phase.prepareForBlock(tree)) - } - def goBlock(tree: Block, start: Int)(using Context): Tree = { + def goBlock(tree: Block, start: Int)(using Context): Tree = val phase = nxBlockTransPhase(start) if (phase == null) tree - else phase.transformBlock(tree) match { + else phase.transformBlock(tree) match case tree1: Block => goBlock(tree1, phase.idxInGroup + 1) case tree1 => transformNode(tree1, phase.idxInGroup + 1) - } - } - def prepIf(tree: If, start: Int)(using Context): Context = { + def prepIf(tree: If, start: Int)(using Context): Context = val phase = nxIfPrepPhase(start) if (phase == null) ctx else prepIf(tree, phase.idxInGroup + 1)(using phase.prepareForIf(tree)) - } - def goIf(tree: If, start: Int)(using Context): Tree = { + def goIf(tree: If, start: Int)(using Context): Tree = val phase = nxIfTransPhase(start) if (phase == null) tree - else phase.transformIf(tree) match { + else phase.transformIf(tree) match case tree1: If => goIf(tree1, phase.idxInGroup + 1) case tree1 => transformNode(tree1, phase.idxInGroup + 1) - } - } - def prepClosure(tree: Closure, start: Int)(using Context): Context = { + def prepClosure(tree: Closure, start: Int)(using Context): Context = val phase = nxClosurePrepPhase(start) if (phase == null) ctx else prepClosure(tree, phase.idxInGroup + 1)(using 
phase.prepareForClosure(tree)) - } - def goClosure(tree: Closure, start: Int)(using Context): Tree = { + def goClosure(tree: Closure, start: Int)(using Context): Tree = val phase = nxClosureTransPhase(start) if (phase == null) tree - else phase.transformClosure(tree) match { + else phase.transformClosure(tree) match case tree1: Closure => goClosure(tree1, phase.idxInGroup + 1) case tree1 => transformNode(tree1, phase.idxInGroup + 1) - } - } - def prepMatch(tree: Match, start: Int)(using Context): Context = { + def prepMatch(tree: Match, start: Int)(using Context): Context = val phase = nxMatchPrepPhase(start) if (phase == null) ctx else prepMatch(tree, phase.idxInGroup + 1)(using phase.prepareForMatch(tree)) - } - def goMatch(tree: Match, start: Int)(using Context): Tree = { + def goMatch(tree: Match, start: Int)(using Context): Tree = val phase = nxMatchTransPhase(start) if (phase == null) tree - else phase.transformMatch(tree) match { + else phase.transformMatch(tree) match case tree1: Match => goMatch(tree1, phase.idxInGroup + 1) case tree1 => transformNode(tree1, phase.idxInGroup + 1) - } - } - def prepCaseDef(tree: CaseDef, start: Int)(using Context): Context = { + def prepCaseDef(tree: CaseDef, start: Int)(using Context): Context = val phase = nxCaseDefPrepPhase(start) if (phase == null) ctx else prepCaseDef(tree, phase.idxInGroup + 1)(using phase.prepareForCaseDef(tree)) - } - def goCaseDef(tree: CaseDef, start: Int)(using Context): Tree = { + def goCaseDef(tree: CaseDef, start: Int)(using Context): Tree = val phase = nxCaseDefTransPhase(start) if (phase == null) tree - else phase.transformCaseDef(tree) match { + else phase.transformCaseDef(tree) match case tree1: CaseDef => goCaseDef(tree1, phase.idxInGroup + 1) case tree1 => transformNode(tree1, phase.idxInGroup + 1) - } - } - def prepLabeled(tree: Labeled, start: Int)(using Context): Context = { + def prepLabeled(tree: Labeled, start: Int)(using Context): Context = val phase = nxLabeledPrepPhase(start) if 
(phase == null) ctx else prepLabeled(tree, phase.idxInGroup + 1)(using phase.prepareForLabeled(tree)) - } - def goLabeled(tree: Labeled, start: Int)(using Context): Tree = { + def goLabeled(tree: Labeled, start: Int)(using Context): Tree = val phase = nxLabeledTransPhase(start) if (phase == null) tree - else phase.transformLabeled(tree) match { + else phase.transformLabeled(tree) match case tree1: Labeled => goLabeled(tree1, phase.idxInGroup + 1) case tree1 => transformNode(tree1, phase.idxInGroup + 1) - } - } - def prepReturn(tree: Return, start: Int)(using Context): Context = { + def prepReturn(tree: Return, start: Int)(using Context): Context = val phase = nxReturnPrepPhase(start) if (phase == null) ctx else prepReturn(tree, phase.idxInGroup + 1)(using phase.prepareForReturn(tree)) - } - def goReturn(tree: Return, start: Int)(using Context): Tree = { + def goReturn(tree: Return, start: Int)(using Context): Tree = val phase = nxReturnTransPhase(start) if (phase == null) tree - else phase.transformReturn(tree) match { + else phase.transformReturn(tree) match case tree1: Return => goReturn(tree1, phase.idxInGroup + 1) case tree1 => transformNode(tree1, phase.idxInGroup + 1) - } - } - def prepWhileDo(tree: WhileDo, start: Int)(using Context): Context = { + def prepWhileDo(tree: WhileDo, start: Int)(using Context): Context = val phase = nxWhileDoPrepPhase(start) if (phase == null) ctx else prepWhileDo(tree, phase.idxInGroup + 1)(using phase.prepareForWhileDo(tree)) - } - def goWhileDo(tree: WhileDo, start: Int)(using Context): Tree = { + def goWhileDo(tree: WhileDo, start: Int)(using Context): Tree = val phase = nxWhileDoTransPhase(start) if (phase == null) tree - else phase.transformWhileDo(tree) match { + else phase.transformWhileDo(tree) match case tree1: WhileDo => goWhileDo(tree1, phase.idxInGroup + 1) case tree1 => transformNode(tree1, phase.idxInGroup + 1) - } - } - def prepTry(tree: Try, start: Int)(using Context): Context = { + def prepTry(tree: Try, start: 
Int)(using Context): Context = val phase = nxTryPrepPhase(start) if (phase == null) ctx else prepTry(tree, phase.idxInGroup + 1)(using phase.prepareForTry(tree)) - } - def goTry(tree: Try, start: Int)(using Context): Tree = { + def goTry(tree: Try, start: Int)(using Context): Tree = val phase = nxTryTransPhase(start) if (phase == null) tree - else phase.transformTry(tree) match { + else phase.transformTry(tree) match case tree1: Try => goTry(tree1, phase.idxInGroup + 1) case tree1 => transformNode(tree1, phase.idxInGroup + 1) - } - } - def prepSeqLiteral(tree: SeqLiteral, start: Int)(using Context): Context = { + def prepSeqLiteral(tree: SeqLiteral, start: Int)(using Context): Context = val phase = nxSeqLiteralPrepPhase(start) if (phase == null) ctx else prepSeqLiteral(tree, phase.idxInGroup + 1)(using phase.prepareForSeqLiteral(tree)) - } - def goSeqLiteral(tree: SeqLiteral, start: Int)(using Context): Tree = { + def goSeqLiteral(tree: SeqLiteral, start: Int)(using Context): Tree = val phase = nxSeqLiteralTransPhase(start) if (phase == null) tree - else phase.transformSeqLiteral(tree) match { + else phase.transformSeqLiteral(tree) match case tree1: SeqLiteral => goSeqLiteral(tree1, phase.idxInGroup + 1) case tree1 => transformNode(tree1, phase.idxInGroup + 1) - } - } - def prepInlined(tree: Inlined, start: Int)(using Context): Context = { + def prepInlined(tree: Inlined, start: Int)(using Context): Context = val phase = nxInlinedPrepPhase(start) if (phase == null) ctx else prepInlined(tree, phase.idxInGroup + 1)(using phase.prepareForInlined(tree)) - } - def goInlined(tree: Inlined, start: Int)(using Context): Tree = { + def goInlined(tree: Inlined, start: Int)(using Context): Tree = val phase = nxInlinedTransPhase(start) if (phase == null) tree - else phase.transformInlined(tree) match { + else phase.transformInlined(tree) match case tree1: Inlined => goInlined(tree1, phase.idxInGroup + 1) case tree1 => transformNode(tree1, phase.idxInGroup + 1) - } - } - def 
prepQuote(tree: Quote, start: Int)(using Context): Context = { + def prepQuote(tree: Quote, start: Int)(using Context): Context = val phase = nxQuotePrepPhase(start) if (phase == null) ctx else prepQuote(tree, phase.idxInGroup + 1)(using phase.prepareForQuote(tree)) - } - def goQuote(tree: Quote, start: Int)(using Context): Tree = { + def goQuote(tree: Quote, start: Int)(using Context): Tree = val phase = nxQuoteTransPhase(start) if (phase == null) tree - else phase.transformQuote(tree) match { + else phase.transformQuote(tree) match case tree1: Quote => goQuote(tree1, phase.idxInGroup + 1) case tree1 => transformNode(tree1, phase.idxInGroup + 1) - } - } - def prepSplice(tree: Splice, start: Int)(using Context): Context = { + def prepSplice(tree: Splice, start: Int)(using Context): Context = val phase = nxSplicePrepPhase(start) if (phase == null) ctx else prepSplice(tree, phase.idxInGroup + 1)(using phase.prepareForSplice(tree)) - } - def goSplice(tree: Splice, start: Int)(using Context): Tree = { + def goSplice(tree: Splice, start: Int)(using Context): Tree = val phase = nxSpliceTransPhase(start) if (phase == null) tree - else phase.transformSplice(tree) match { + else phase.transformSplice(tree) match case tree1: Splice => goSplice(tree1, phase.idxInGroup + 1) case tree1 => transformNode(tree1, phase.idxInGroup + 1) - } - } - def prepTypeTree(tree: TypeTree, start: Int)(using Context): Context = { + def prepTypeTree(tree: TypeTree, start: Int)(using Context): Context = val phase = nxTypeTreePrepPhase(start) if (phase == null) ctx else prepTypeTree(tree, phase.idxInGroup + 1)(using phase.prepareForTypeTree(tree)) - } - def goTypeTree(tree: TypeTree, start: Int)(using Context): Tree = { + def goTypeTree(tree: TypeTree, start: Int)(using Context): Tree = val phase = nxTypeTreeTransPhase(start) if (phase == null) tree - else phase.transformTypeTree(tree) match { + else phase.transformTypeTree(tree) match case tree1: TypeTree => goTypeTree(tree1, phase.idxInGroup + 1) 
case tree1 => transformNode(tree1, phase.idxInGroup + 1) - } - } - def prepBind(tree: Bind, start: Int)(using Context): Context = { + def prepBind(tree: Bind, start: Int)(using Context): Context = val phase = nxBindPrepPhase(start) if (phase == null) ctx else prepBind(tree, phase.idxInGroup + 1)(using phase.prepareForBind(tree)) - } - def goBind(tree: Bind, start: Int)(using Context): Tree = { + def goBind(tree: Bind, start: Int)(using Context): Tree = val phase = nxBindTransPhase(start) if (phase == null) tree - else phase.transformBind(tree) match { + else phase.transformBind(tree) match case tree1: Bind => goBind(tree1, phase.idxInGroup + 1) case tree1 => transformNode(tree1, phase.idxInGroup + 1) - } - } - def prepAlternative(tree: Alternative, start: Int)(using Context): Context = { + def prepAlternative(tree: Alternative, start: Int)(using Context): Context = val phase = nxAlternativePrepPhase(start) if (phase == null) ctx else prepAlternative(tree, phase.idxInGroup + 1)(using phase.prepareForAlternative(tree)) - } - def goAlternative(tree: Alternative, start: Int)(using Context): Tree = { + def goAlternative(tree: Alternative, start: Int)(using Context): Tree = val phase = nxAlternativeTransPhase(start) if (phase == null) tree - else phase.transformAlternative(tree) match { + else phase.transformAlternative(tree) match case tree1: Alternative => goAlternative(tree1, phase.idxInGroup + 1) case tree1 => transformNode(tree1, phase.idxInGroup + 1) - } - } - def prepUnApply(tree: UnApply, start: Int)(using Context): Context = { + def prepUnApply(tree: UnApply, start: Int)(using Context): Context = val phase = nxUnApplyPrepPhase(start) if (phase == null) ctx else prepUnApply(tree, phase.idxInGroup + 1)(using phase.prepareForUnApply(tree)) - } - def goUnApply(tree: UnApply, start: Int)(using Context): Tree = { + def goUnApply(tree: UnApply, start: Int)(using Context): Tree = val phase = nxUnApplyTransPhase(start) if (phase == null) tree - else 
phase.transformUnApply(tree) match { + else phase.transformUnApply(tree) match case tree1: UnApply => goUnApply(tree1, phase.idxInGroup + 1) case tree1 => transformNode(tree1, phase.idxInGroup + 1) - } - } - def prepValDef(tree: ValDef, start: Int)(using Context): Context = { + def prepValDef(tree: ValDef, start: Int)(using Context): Context = val phase = nxValDefPrepPhase(start) if (phase == null) ctx else prepValDef(tree, phase.idxInGroup + 1)(using phase.prepareForValDef(tree)) - } - def goValDef(tree: ValDef, start: Int)(using Context): Tree = { + def goValDef(tree: ValDef, start: Int)(using Context): Tree = val phase = nxValDefTransPhase(start) if (phase == null) tree - else phase.transformValDef(tree) match { + else phase.transformValDef(tree) match case tree1: ValDef => goValDef(tree1, phase.idxInGroup + 1) case tree1 => transformNode(tree1, phase.idxInGroup + 1) - } - } - def prepDefDef(tree: DefDef, start: Int)(using Context): Context = { + def prepDefDef(tree: DefDef, start: Int)(using Context): Context = val phase = nxDefDefPrepPhase(start) if (phase == null) ctx else prepDefDef(tree, phase.idxInGroup + 1)(using phase.prepareForDefDef(tree)) - } - def goDefDef(tree: DefDef, start: Int)(using Context): Tree = { + def goDefDef(tree: DefDef, start: Int)(using Context): Tree = val phase = nxDefDefTransPhase(start) if (phase == null) tree - else phase.transformDefDef(tree) match { + else phase.transformDefDef(tree) match case tree1: DefDef => goDefDef(tree1, phase.idxInGroup + 1) case tree1 => transformNode(tree1, phase.idxInGroup + 1) - } - } - def prepTypeDef(tree: TypeDef, start: Int)(using Context): Context = { + def prepTypeDef(tree: TypeDef, start: Int)(using Context): Context = val phase = nxTypeDefPrepPhase(start) if (phase == null) ctx else prepTypeDef(tree, phase.idxInGroup + 1)(using phase.prepareForTypeDef(tree)) - } - def goTypeDef(tree: TypeDef, start: Int)(using Context): Tree = { + def goTypeDef(tree: TypeDef, start: Int)(using Context): Tree 
= val phase = nxTypeDefTransPhase(start) if (phase == null) tree - else phase.transformTypeDef(tree) match { + else phase.transformTypeDef(tree) match case tree1: TypeDef => goTypeDef(tree1, phase.idxInGroup + 1) case tree1 => transformNode(tree1, phase.idxInGroup + 1) - } - } - def prepTemplate(tree: Template, start: Int)(using Context): Context = { + def prepTemplate(tree: Template, start: Int)(using Context): Context = val phase = nxTemplatePrepPhase(start) if (phase == null) ctx else prepTemplate(tree, phase.idxInGroup + 1)(using phase.prepareForTemplate(tree)) - } - def goTemplate(tree: Template, start: Int)(using Context): Tree = { + def goTemplate(tree: Template, start: Int)(using Context): Tree = val phase = nxTemplateTransPhase(start) if (phase == null) tree - else phase.transformTemplate(tree) match { + else phase.transformTemplate(tree) match case tree1: Template => goTemplate(tree1, phase.idxInGroup + 1) case tree1 => transformNode(tree1, phase.idxInGroup + 1) - } - } - def prepPackageDef(tree: PackageDef, start: Int)(using Context): Context = { + def prepPackageDef(tree: PackageDef, start: Int)(using Context): Context = val phase = nxPackageDefPrepPhase(start) if (phase == null) ctx else prepPackageDef(tree, phase.idxInGroup + 1)(using phase.prepareForPackageDef(tree)) - } - def goPackageDef(tree: PackageDef, start: Int)(using Context): Tree = { + def goPackageDef(tree: PackageDef, start: Int)(using Context): Tree = val phase = nxPackageDefTransPhase(start) if (phase == null) tree - else phase.transformPackageDef(tree) match { + else phase.transformPackageDef(tree) match case tree1: PackageDef => goPackageDef(tree1, phase.idxInGroup + 1) case tree1 => transformNode(tree1, phase.idxInGroup + 1) - } - } - def prepStats(trees: List[Tree], start: Int)(using Context): Context = { + def prepStats(trees: List[Tree], start: Int)(using Context): Context = val phase = nxStatsPrepPhase(start) if (phase == null) ctx else prepStats(trees, phase.idxInGroup + 
1)(using phase.prepareForStats(trees)) - } - def goStats(trees: List[Tree], start: Int)(using Context): List[Tree] = { + def goStats(trees: List[Tree], start: Int)(using Context): List[Tree] = val phase = nxStatsTransPhase(start) if (phase == null) trees else goStats(phase.transformStats(trees), phase.idxInGroup + 1) - } - def prepUnit(tree: Tree, start: Int)(using Context): Context = { + def prepUnit(tree: Tree, start: Int)(using Context): Context = val phase = nxUnitPrepPhase(start) if (phase == null) ctx else prepUnit(tree, phase.idxInGroup + 1)(using phase.prepareForUnit(tree)) - } - def goUnit(tree: Tree, start: Int)(using Context): Tree = { + def goUnit(tree: Tree, start: Int)(using Context): Tree = val phase = nxUnitTransPhase(start) if (phase == null) tree else goUnit(phase.transformUnit(tree), phase.idxInGroup + 1) - } - def prepOther(tree: Tree, start: Int)(using Context): Context = { + def prepOther(tree: Tree, start: Int)(using Context): Context = val phase = nxOtherPrepPhase(start) if (phase == null) ctx else prepOther(tree, phase.idxInGroup + 1)(using phase.prepareForOther(tree)) - } - def goOther(tree: Tree, start: Int)(using Context): Tree = { + def goOther(tree: Tree, start: Int)(using Context): Tree = val phase = nxOtherTransPhase(start) if (phase == null) tree else goOther(phase.transformOther(tree), phase.idxInGroup + 1) - } -} diff --git a/compiler/src/dotty/tools/dotc/transform/Memoize.scala b/compiler/src/dotty/tools/dotc/transform/Memoize.scala index 03ac15b39ffe..428a6522f09a 100644 --- a/compiler/src/dotty/tools/dotc/transform/Memoize.scala +++ b/compiler/src/dotty/tools/dotc/transform/Memoize.scala @@ -20,14 +20,12 @@ import sjs.JSSymUtils._ import util.Store -object Memoize { +object Memoize: val name: String = "memoize" val description: String = "add private fields to getters and setters" - private final class MyState { + private final class MyState: val classesThatNeedReleaseFence = new util.HashSet[Symbol] - } -} /** Provides the 
implementations of all getters and setters, introducing * fields to hold the value accessed by them. @@ -60,13 +58,12 @@ class Memoize extends MiniPhase with IdentityDenotTransformer { thisPhase => /* Makes sure that, after getters and constructors gen, there doesn't * exist non-deferred definitions that are not implemented. */ - override def checkPostCondition(tree: Tree)(using Context): Unit = { - def errorLackImplementation(t: Tree) = { + override def checkPostCondition(tree: Tree)(using Context): Unit = + def errorLackImplementation(t: Tree) = val definingPhase = phaseOf(t.symbol.initial.validFor.firstPhaseId) throw new AssertionError( i"Non-deferred definition introduced by $definingPhase lacks implementation: $t") - } - tree match { + tree match case ddef: DefDef if !ddef.symbol.is(Deferred) && !ddef.symbol.isConstructor && // constructors bodies are added later at phase Constructors @@ -76,9 +73,7 @@ class Memoize extends MiniPhase with IdentityDenotTransformer { thisPhase => if tdef.symbol.isClass && !tdef.symbol.is(Deferred) && tdef.rhs == EmptyTree => errorLackImplementation(tdef) case _ => - } super.checkPostCondition(tree) - } /** Should run after mixin so that fields get generated in the * class that contains the concrete getter rather than the trait @@ -97,10 +92,10 @@ class Memoize extends MiniPhase with IdentityDenotTransformer { thisPhase => else tree - override def transformDefDef(tree: DefDef)(using Context): Tree = { + override def transformDefDef(tree: DefDef)(using Context): Tree = val sym = tree.symbol - def newField = { + def newField = assert(!sym.hasAnnotation(defn.ScalaStaticAnnot)) val fieldType = if (sym.isGetter) sym.info.resultType @@ -113,8 +108,7 @@ class Memoize extends MiniPhase with IdentityDenotTransformer { thisPhase => info = fieldType, coord = tree.span ).withAnnotationsCarrying(sym, defn.FieldMetaAnnot, orNoneOf = defn.MetaAnnots) - .enteredAfter(thisPhase) - } + .enteredAfter(thisPhase) val NoFieldNeeded = Lazy | Deferred | 
JavaDefined | Inline @@ -122,10 +116,9 @@ class Memoize extends MiniPhase with IdentityDenotTransformer { thisPhase => if (sym eq defn.NothingClass) Throw(nullLiteral) else if (sym eq defn.NullClass) nullLiteral else if (sym eq defn.BoxedUnitClass) ref(defn.BoxedUnit_UNIT) - else { + else assert(false, s"$sym has no erased bottom tree") EmptyTree - } if sym.is(Accessor, butNot = NoFieldNeeded) then def adaptToField(field: Symbol, tree: Tree): Tree = @@ -184,5 +177,4 @@ class Memoize extends MiniPhase with IdentityDenotTransformer { thisPhase => tree else tree - } } diff --git a/compiler/src/dotty/tools/dotc/transform/Mixin.scala b/compiler/src/dotty/tools/dotc/transform/Mixin.scala index 5ca09dd6188f..7ca3d76dc7ff 100644 --- a/compiler/src/dotty/tools/dotc/transform/Mixin.scala +++ b/compiler/src/dotty/tools/dotc/transform/Mixin.scala @@ -18,19 +18,17 @@ import NameKinds._ import NameOps._ import ast.Trees._ -object Mixin { +object Mixin: val name: String = "mixin" val description: String = "expand trait fields and trait initializers" def traitSetterName(getter: TermSymbol)(using Context): TermName = - extension (name: Name) def qualifiedToSimple = name.replace { + extension (name: Name) def qualifiedToSimple = name.replace: case n @ AnyQualifiedName(_, _) => n.toSimpleName - } getter.ensureNotPrivate.name .qualifiedToSimple // TODO: Find out why TraitSetterNames can't be defined over QualifiedNames .expandedName(getter.owner, TraitSetterName) .asTermName.syntheticSetterName -} /** This phase performs the following transformations: * @@ -130,12 +128,11 @@ class Mixin extends MiniPhase with SymTransformer { thisPhase => override def transformSym(sym: SymDenotation)(using Context): SymDenotation = def ownerIsTrait: Boolean = was(sym.owner, Trait, butNot = JavaDefined) - if (sym.is(Accessor, butNot = Deferred) && ownerIsTrait) { + if (sym.is(Accessor, butNot = Deferred) && ownerIsTrait) val sym1 = if (sym.is(Lazy) || sym.symbol.isConstExprFinalVal) sym else 
sym.copySymDenotation(initFlags = sym.flags &~ (ParamAccessor | Inline) | Deferred) sym1.ensureNotPrivate - } else if sym.isAllOf(ModuleClass | Private) && ownerIsTrait then // modules in trait will be instantiated in the classes mixing in the trait; they must be made non-private // do not use ensureNotPrivate because the `name` must not be expanded in this case @@ -181,13 +178,13 @@ class Mixin extends MiniPhase with SymTransformer { thisPhase => flags = Method | Accessor | Deferred, info = MethodType(getter.info.resultType :: Nil, defn.UnitType)) - override def transformTemplate(impl: Template)(using Context): Template = { + override def transformTemplate(impl: Template)(using Context): Template = val cls = impl.symbol.owner.asClass val ops = new MixinOps(cls, thisPhase) import ops._ - def traitDefs(stats: List[Tree]): List[Tree] = { - stats.flatMap { + def traitDefs(stats: List[Tree]): List[Tree] = + stats.flatMap: case stat: DefDef if needsTraitSetter(stat.symbol) => // add a trait setter for this getter stat :: DefDef(stat.symbol.traitSetter.asTerm, EmptyTree) :: Nil @@ -195,8 +192,6 @@ class Mixin extends MiniPhase with SymTransformer { thisPhase => cpy.DefDef(stat)(rhs = EmptyTree) :: Nil case stat => stat :: Nil - } - } /** Map constructor call to a triple of a supercall, and if the target * is a trait @@ -204,14 +199,14 @@ class Mixin extends MiniPhase with SymTransformer { thisPhase => * due to reorderings with named and/or default parameters). 
* - a list of arguments to be used as initializers of trait parameters */ - def transformConstructor(tree: Tree): (Tree, List[Tree], List[Tree]) = tree match { + def transformConstructor(tree: Tree): (Tree, List[Tree], List[Tree]) = tree match case Block(stats, expr) => val (scall, inits, args) = transformConstructor(expr) if args.isEmpty then (cpy.Block(tree)(stats, scall), inits, args) else // it's a trait constructor with parameters, lift all prefix statements to class context // so that they precede argument definitions. - stats.foreach { + stats.foreach: case stat: ValDef => stat.symbol.copySymDenotation( owner = cls, @@ -219,13 +214,11 @@ class Mixin extends MiniPhase with SymTransformer { thisPhase => ).installAfter(thisPhase) stat.symbol.enteredAfter(thisPhase) case _ => - } (scall, stats ::: inits, args) case _ => val Apply(sel @ Select(New(_), nme.CONSTRUCTOR), args) = tree: @unchecked val (callArgs, initArgs) = if (tree.symbol.owner.is(Trait)) (Nil, args) else (args, Nil) (superRef(tree.symbol, tree.span).appliedToTermArgs(callArgs), Nil, initArgs) - } val superCallsAndArgs: Map[Symbol, (Tree, List[Tree], List[Tree])] = ( for (p <- impl.parents; constr = stripBlock(p).symbol if constr.isConstructor) @@ -243,7 +236,7 @@ class Mixin extends MiniPhase with SymTransformer { thisPhase => //println(i"synth super call ${baseCls.primaryConstructor}: ${baseCls.primaryConstructor.info}") transformFollowingDeep(superRef(baseCls.primaryConstructor).appliedToNone) :: Nil - def traitInits(mixin: ClassSymbol): List[Tree] = { + def traitInits(mixin: ClassSymbol): List[Tree] = val argsIt = superCallsAndArgs.get(mixin) match case Some((_, _, args)) => args.iterator case _ => Iterator.empty @@ -266,7 +259,7 @@ class Mixin extends MiniPhase with SymTransformer { thisPhase => && !wasOneOf(getter, Deferred) && !getter.isConstExprFinalVal yield - if (isInImplementingClass(getter) || getter.name.is(ExpandedName)) { + if (isInImplementingClass(getter) || 
getter.name.is(ExpandedName)) val rhs = if (wasOneOf(getter, ParamAccessor)) nextArgument() @@ -278,12 +271,10 @@ class Mixin extends MiniPhase with SymTransformer { thisPhase => Underscore(getter.info.resultType) // transformFollowing call is needed to make memoize & lazy vals run transformFollowing(DefDef(mkForwarderSym(getter.asTerm), rhs)) - } else if wasOneOf(getter, ParamAccessor) then // mixin parameter field is defined by an override; evaluate the argument and throw it away nextArgument() else EmptyTree - } def setters(mixin: ClassSymbol): List[Tree] = val mixinSetters = mixin.info.decls.filter { sym => @@ -294,19 +285,18 @@ class Mixin extends MiniPhase with SymTransformer { thisPhase => def mixinForwarders(mixin: ClassSymbol): List[Tree] = for (meth <- mixin.info.decls.toList if needsMixinForwarder(meth)) - yield { + yield util.Stats.record("mixin forwarders") transformFollowing(DefDef(mkForwarderSym(meth.asTerm, Bridge), forwarderRhsFn(meth))) - } cpy.Template(impl)( constr = if (cls.is(Trait)) cpy.DefDef(impl.constr)(paramss = Nil :: Nil) else impl.constr, - parents = impl.parents.map(p => TypeTree(p.tpe).withSpan(p.span)), + parents = impl.parents.map(p => TypeTree(p.tpe).withSpan(p.span)), body = if (cls.is(Trait)) traitDefs(impl.body) - else if (!cls.isPrimitiveValueClass) { + else if (!cls.isPrimitiveValueClass) val mixInits = mixins.flatMap { mixin => val prefix = superCallsAndArgs.get(mixin) match case Some((_, inits, _)) => inits @@ -320,7 +310,5 @@ class Mixin extends MiniPhase with SymTransformer { thisPhase => superCallOpt(superCls) ::: mixInits ::: impl.body - } else impl.body) - } } diff --git a/compiler/src/dotty/tools/dotc/transform/MixinOps.scala b/compiler/src/dotty/tools/dotc/transform/MixinOps.scala index fa1c09806893..c02171131229 100644 --- a/compiler/src/dotty/tools/dotc/transform/MixinOps.scala +++ b/compiler/src/dotty/tools/dotc/transform/MixinOps.scala @@ -8,7 +8,7 @@ import SymUtils._ import StdNames._, NameOps._ import 
typer.Nullables -class MixinOps(cls: ClassSymbol, thisPhase: DenotTransformer)(using Context) { +class MixinOps(cls: ClassSymbol, thisPhase: DenotTransformer)(using Context): import ast.tpd._ val superCls: Symbol = cls.superClass @@ -18,7 +18,7 @@ class MixinOps(cls: ClassSymbol, thisPhase: DenotTransformer)(using Context) { map(n => getClassIfDefined("org.junit." + n)). filter(_.exists) - def mkForwarderSym(member: TermSymbol, extraFlags: FlagSet = EmptyFlags): TermSymbol = { + def mkForwarderSym(member: TermSymbol, extraFlags: FlagSet = EmptyFlags): TermSymbol = val res = member.copy( owner = cls, name = member.name.stripScala2LocalSuffix, @@ -26,9 +26,8 @@ class MixinOps(cls: ClassSymbol, thisPhase: DenotTransformer)(using Context) { info = cls.thisType.memberInfo(member)).enteredAfter(thisPhase).asTerm res.addAnnotations(member.annotations.filter(_.symbol != defn.TailrecAnnot)) res - } - def superRef(target: Symbol, span: Span = cls.span): Tree = { + def superRef(target: Symbol, span: Span = cls.span): Tree = val sup = if (target.isConstructor && !target.owner.is(Trait)) Super(This(cls), tpnme.EMPTY) else @@ -37,22 +36,20 @@ class MixinOps(cls: ClassSymbol, thisPhase: DenotTransformer)(using Context) { ast.untpd.Select(sup.withSpan(span), target.name) .withType(NamedType(sup.tpe, target)) //sup.select(target) - } /** Is `sym` a member of implementing class `cls`? * The test is performed at phase `thisPhase`. 
*/ def isInImplementingClass(sym: Symbol): Boolean = - atPhase(thisPhase) { + atPhase(thisPhase): cls.info.nonPrivateMember(sym.name).hasAltWith(_.symbol == sym) - } /** Does `method` need a forwarder to in class `cls` * Method needs a forwarder in those cases: * - there's a class defining a method with same signature * - there are multiple traits defining method with same signature */ - def needsMixinForwarder(meth: Symbol): Boolean = { + def needsMixinForwarder(meth: Symbol): Boolean = lazy val competingMethods = competingMethodsIterator(meth).toList def needsDisambiguation = competingMethods.exists(x=> !x.is(Deferred)) // multiple implementations are available @@ -70,9 +67,8 @@ class MixinOps(cls: ClassSymbol, thisPhase: DenotTransformer)(using Context) { !meth.isConstructor && meth.is(Method, butNot = PrivateOrAccessorOrDeferred) && (ctx.settings.mixinForwarderChoices.isTruthy || meth.owner.is(Scala2x) || needsDisambiguation || hasNonInterfaceDefinition || - generateJUnitForwarder || generateSerializationForwarder) && + generateJUnitForwarder || generateSerializationForwarder) && isInImplementingClass(meth) - } final val PrivateOrAccessor: FlagSet = Private | Accessor final val PrivateOrAccessorOrDeferred: FlagSet = Private | Accessor | Deferred @@ -101,4 +97,3 @@ class MixinOps(cls: ClassSymbol, thisPhase: DenotTransformer)(using Context) { .filter(_ ne meth.owner) .map(base => meth.overriddenSymbol(base, cls)) .filter(_.exists) -} diff --git a/compiler/src/dotty/tools/dotc/transform/MoveStatics.scala b/compiler/src/dotty/tools/dotc/transform/MoveStatics.scala index db96aeefe231..336cceb1fdb3 100644 --- a/compiler/src/dotty/tools/dotc/transform/MoveStatics.scala +++ b/compiler/src/dotty/tools/dotc/transform/MoveStatics.scala @@ -19,7 +19,7 @@ import SymUtils._ import MegaPhase._ /** Move static methods from companion to the class itself */ -class MoveStatics extends MiniPhase with SymTransformer { +class MoveStatics extends MiniPhase with SymTransformer: 
import ast.tpd._ override def phaseName: String = MoveStatics.name @@ -28,22 +28,21 @@ class MoveStatics extends MiniPhase with SymTransformer { def transformSym(sym: SymDenotation)(using Context): SymDenotation = if (sym.hasAnnotation(defn.ScalaStaticAnnot) && sym.owner.is(Flags.Module) && sym.owner.companionClass.exists && - (sym.is(Flags.Method) || !(sym.is(Flags.Mutable) && sym.owner.companionClass.is(Flags.Trait)))) { + (sym.is(Flags.Method) || !(sym.is(Flags.Mutable) && sym.owner.companionClass.is(Flags.Trait)))) sym.owner.asClass.delete(sym.symbol) sym.owner.companionClass.asClass.enter(sym.symbol) sym.copySymDenotation(owner = sym.owner.companionClass) - } else sym override def transformStats(trees: List[Tree])(using Context): List[Tree] = - if (ctx.owner.is(Flags.Package)) { + if (ctx.owner.is(Flags.Package)) val (classes, others) = trees.partition(x => x.isInstanceOf[TypeDef] && x.symbol.isClass) val pairs = classes.groupBy(_.symbol.name.stripModuleClassSuffix).asInstanceOf[Map[Name, List[TypeDef]]] - def rebuild(orig: TypeDef, newBody: List[Tree]): Tree = { + def rebuild(orig: TypeDef, newBody: List[Tree]): Tree = val staticFields = newBody.filter(x => x.isInstanceOf[ValDef] && x.symbol.hasAnnotation(defn.ScalaStaticAnnot)).asInstanceOf[List[ValDef]] val newBodyWithStaticConstr = - if (staticFields.nonEmpty) { + if (staticFields.nonEmpty) /* do NOT put Flags.JavaStatic here. 
It breaks .enclosingClass */ val staticCostructor = newSymbol(orig.symbol, nme.STATIC_CONSTRUCTOR, Flags.Synthetic | Flags.Method | Flags.Private, MethodType(Nil, defn.UnitType)) staticCostructor.addAnnotation(Annotation(defn.ScalaStaticAnnot, staticCostructor.span)) @@ -51,42 +50,33 @@ class MoveStatics extends MiniPhase with SymTransformer { val staticAssigns = staticFields.map(x => Assign(ref(x.symbol), x.rhs.changeOwner(x.symbol, staticCostructor))) tpd.DefDef(staticCostructor, Block(staticAssigns, tpd.unitLiteral)) :: newBody - } else newBody val oldTemplate = orig.rhs.asInstanceOf[Template] cpy.TypeDef(orig)(rhs = cpy.Template(oldTemplate)(body = newBodyWithStaticConstr)) - } - def move(module: TypeDef, companion: TypeDef): List[Tree] = { + def move(module: TypeDef, companion: TypeDef): List[Tree] = assert(companion != module) if (!module.symbol.is(Flags.Module)) move(companion, module) - else { + else val moduleTmpl = module.rhs.asInstanceOf[Template] val companionTmpl = companion.rhs.asInstanceOf[Template] - val (staticDefs, remainingDefs) = moduleTmpl.body.partition { + val (staticDefs, remainingDefs) = moduleTmpl.body.partition: case memberDef: MemberDef => memberDef.symbol.isScalaStatic case _ => false - } rebuild(companion, companionTmpl.body ++ staticDefs) :: rebuild(module, remainingDefs) :: Nil - } - } val newPairs = for ((name, classes) <- pairs) yield - if (classes.tail.isEmpty) { + if (classes.tail.isEmpty) val classDef = classes.head val tmpl = classDef.rhs.asInstanceOf[Template] rebuild(classDef, tmpl.body) :: Nil - } else move(classes.head, classes.tail.head) Trees.flatten(newPairs.toList.flatten ++ others) - } else trees -} -object MoveStatics { +object MoveStatics: val name: String = "moveStatic" val description: String = "move static methods from companion to the class itself" -} diff --git a/compiler/src/dotty/tools/dotc/transform/NonLocalReturns.scala b/compiler/src/dotty/tools/dotc/transform/NonLocalReturns.scala index 
a75d6da9dd6a..e0a0db66643f 100644 --- a/compiler/src/dotty/tools/dotc/transform/NonLocalReturns.scala +++ b/compiler/src/dotty/tools/dotc/transform/NonLocalReturns.scala @@ -8,7 +8,7 @@ import NameKinds.NonLocalReturnKeyName import config.SourceVersion.* import Decorators.em -object NonLocalReturns { +object NonLocalReturns: import ast.tpd._ val name: String = "nonLocalReturns" @@ -16,11 +16,10 @@ object NonLocalReturns { def isNonLocalReturn(ret: Return)(using Context): Boolean = !ret.from.symbol.is(Label) && (ret.from.symbol != ctx.owner.enclosingMethod || ctx.owner.is(Lazy)) -} /** Implement non-local returns using NonLocalReturnControl exceptions. */ -class NonLocalReturns extends MiniPhase { +class NonLocalReturns extends MiniPhase: override def phaseName: String = NonLocalReturns.name @@ -76,7 +75,7 @@ class NonLocalReturns extends MiniPhase { * } * } */ - private def nonLocalReturnTry(body: Tree, key: TermSymbol, meth: Symbol)(using Context) = { + private def nonLocalReturnTry(body: Tree, key: TermSymbol, meth: Symbol)(using Context) = val keyDef = ValDef(key, New(defn.ObjectType, Nil)) val ex = newSymbol(meth, nme.ex, Case, nonLocalReturnControl, coord = body.span) val pat = BindTyped(ex, nonLocalReturnControl) @@ -87,7 +86,6 @@ class NonLocalReturns extends MiniPhase { val catches = CaseDef(pat, EmptyTree, rhs) :: Nil val tryCatch = Try(body, catches, EmptyTree) Block(keyDef :: Nil, tryCatch) - } override def transformDefDef(tree: DefDef)(using Context): Tree = nonLocalReturnKeys.remove(tree.symbol) match @@ -103,4 +101,3 @@ class NonLocalReturns extends MiniPhase { errorFrom = future) nonLocalReturnThrow(tree.expr, tree.from.symbol).withSpan(tree.span) else tree -} diff --git a/compiler/src/dotty/tools/dotc/transform/OverridingPairs.scala b/compiler/src/dotty/tools/dotc/transform/OverridingPairs.scala index 48dc7c818360..d14a1ad36370 100644 --- a/compiler/src/dotty/tools/dotc/transform/OverridingPairs.scala +++ 
b/compiler/src/dotty/tools/dotc/transform/OverridingPairs.scala @@ -47,26 +47,23 @@ object OverridingPairs: sym1.isType || sym1.asSeenFrom(self).matches(sym2.asSeenFrom(self)) /** The symbols that can take part in an overriding pair */ - private val decls = { + private val decls = val decls = newScope // fill `decls` with overriding shadowing overridden */ - def fillDecls(bcs: List[Symbol], deferred: Boolean): Unit = bcs match { + def fillDecls(bcs: List[Symbol], deferred: Boolean): Unit = bcs match case bc :: bcs1 => fillDecls(bcs1, deferred) var e = bc.info.decls.lastEntry - while (e != null) { + while (e != null) if (e.sym.is(Deferred) == deferred && !exclude(e.sym)) decls.enter(e.sym) e = e.prev - } case nil => - } // first, deferred (this will need to change if we change lookup rules! fillDecls(base.info.baseClasses, deferred = true) // then, concrete. fillDecls(base.info.baseClasses, deferred = false) decls - } /** Is `parent` a qualified sub-parent of `bc`? * @pre `parent` is a parent class of `base` and it derives from `bc`. 
@@ -130,18 +127,15 @@ object OverridingPairs: * nextEntry = curEntry * overriding = curEntry.sym */ - private def nextOverriding(): Unit = { + private def nextOverriding(): Unit = @tailrec def loop(): Unit = - if (curEntry != null) { + if (curEntry != null) overriding = curEntry.uncheckedNN.sym - if (visited.contains(overriding)) { + if (visited.contains(overriding)) curEntry = curEntry.uncheckedNN.prev loop() - } - } loop() nextEntry = curEntry - } /** @post * hasNext = there is another overriding pair diff --git a/compiler/src/dotty/tools/dotc/transform/PatternMatcher.scala b/compiler/src/dotty/tools/dotc/transform/PatternMatcher.scala index ac1e1868f26e..d84260a29239 100644 --- a/compiler/src/dotty/tools/dotc/transform/PatternMatcher.scala +++ b/compiler/src/dotty/tools/dotc/transform/PatternMatcher.scala @@ -26,7 +26,7 @@ import scala.collection.mutable * After this phase, the only Match nodes remaining in the code are simple switches * where every pattern is an integer or string constant */ -class PatternMatcher extends MiniPhase { +class PatternMatcher extends MiniPhase: import ast.tpd._ import PatternMatcher._ @@ -38,7 +38,7 @@ class PatternMatcher extends MiniPhase { override def transformMatch(tree: Match)(using Context): Tree = if (tree.isInstanceOf[InlineMatch]) tree - else { + else // Widen termrefs with underlying `=> T` types. Otherwise ElimByName will produce // inconsistent types. See i7743.scala. // Question: Does this need to be done more systematically, not just for pattern matches? @@ -52,10 +52,8 @@ class PatternMatcher extends MiniPhase { SpaceEngine.checkRedundancy(tree) translated.ensureConforms(matchType) - } -} -object PatternMatcher { +object PatternMatcher: import ast.tpd._ val name: String = "patternMatcher" @@ -89,7 +87,7 @@ object PatternMatcher { * It's represented by its own data type. Plans are optimized by merging common * tests and eliminating dead code. 
*/ - class Translator(resultType: Type, thisPhase: MiniPhase)(using Context) { + class Translator(resultType: Type, thisPhase: MiniPhase)(using Context): // ------- Bindings for variables and labels --------------------- @@ -107,38 +105,33 @@ object PatternMatcher { // TODO: Drop Case once we use everywhere else `isPatmatGenerated`. /** The plan `let x = rhs in body(x)` where `x` is a fresh variable */ - private def letAbstract(rhs: Tree, tpe: Type = NoType)(body: Symbol => Plan): Plan = { + private def letAbstract(rhs: Tree, tpe: Type = NoType)(body: Symbol => Plan): Plan = val declTpe = if tpe.exists then tpe else rhs.tpe val vble = newVar(rhs, EmptyFlags, declTpe) initializer(vble) = rhs LetPlan(vble, body(vble)) - } /** The plan `l: { expr(l) }` where `l` is a fresh label */ - private def altsLabeledAbstract(expr: (=> Plan) => Plan): Plan = { + private def altsLabeledAbstract(expr: (=> Plan) => Plan): Plan = val label = newSymbol(ctx.owner, PatMatAltsName.fresh(), Synthetic | Label, defn.UnitType) LabeledPlan(label, expr(ReturnPlan(label))) - } /** Test whether a type refers to a pattern-generated variable */ - private val refersToInternal = new TypeAccumulator[Boolean] { + private val refersToInternal = new TypeAccumulator[Boolean]: def apply(x: Boolean, tp: Type) = x || { - tp match { + tp match case tp: TermRef => isPatmatGenerated(tp.symbol) case _ => false - } } || foldOver(x, tp) - } /** Widen type as far as necessary so that it does not refer to a pattern- * generated variable. 
*/ - private def sanitize(tp: Type): Type = tp.widenIfUnstable match { + private def sanitize(tp: Type): Type = tp.widenIfUnstable match case tp: TermRef if refersToInternal(false, tp) => sanitize(tp.underlying) case tp => tp - } // ------- Plan and test types ------------------------ @@ -149,13 +142,11 @@ object PatternMatcher { sealed abstract class Plan { val id: Int = nxId; nxId += 1 } case class TestPlan(test: Test, var scrutinee: Tree, span: Span, - var onSuccess: Plan) extends Plan { - override def equals(that: Any): Boolean = that match { + var onSuccess: Plan) extends Plan: + override def equals(that: Any): Boolean = that match case that: TestPlan => this.scrutinee === that.scrutinee && this.test == that.test case _ => false - } override def hashCode: Int = scrutinee.hash * 41 + test.hashCode - } case class LetPlan(sym: TermSymbol, var body: Plan) extends Plan case class LabeledPlan(sym: TermSymbol, var expr: Plan) extends Plan @@ -163,27 +154,22 @@ object PatternMatcher { case class SeqPlan(var head: Plan, var tail: Plan) extends Plan case class ResultPlan(var tree: Tree) extends Plan - object TestPlan { + object TestPlan: def apply(test: Test, sym: Symbol, span: Span, ons: Plan): TestPlan = TestPlan(test, ref(sym), span, ons) - } /** The different kinds of tests */ sealed abstract class Test - case class TypeTest(tpt: Tree, trusted: Boolean) extends Test { // scrutinee.isInstanceOf[tpt] - override def equals(that: Any): Boolean = that match { + case class TypeTest(tpt: Tree, trusted: Boolean) extends Test: // scrutinee.isInstanceOf[tpt] + override def equals(that: Any): Boolean = that match case that: TypeTest => this.tpt.tpe =:= that.tpt.tpe case _ => false - } override def hashCode: Int = tpt.tpe.hash - } - case class EqualTest(tree: Tree) extends Test { // scrutinee == tree - override def equals(that: Any): Boolean = that match { + case class EqualTest(tree: Tree) extends Test: // scrutinee == tree + override def equals(that: Any): Boolean = that 
match case that: EqualTest => this.tree === that.tree case _ => false - } override def hashCode: Int = tree.hash - } case class LengthTest(len: Int, exact: Boolean) extends Test // scrutinee (== | >=) len case object NonEmptyTest extends Test // !scrutinee.isEmpty case object NonNullTest extends Test // scrutinee ne null @@ -197,8 +183,8 @@ object PatternMatcher { /** A conservative approximation of which patterns do not discern anything. * They are discarded during the translation. */ - private object WildcardPattern { - def unapply(pat: Tree): Boolean = pat match { + private object WildcardPattern: + def unapply(pat: Tree): Boolean = pat match case Typed(_, tpt) if tpt.tpe.isRepeatedParam => true case Bind(nme.WILDCARD, WildcardPattern()) => true // don't skip when binding an interesting symbol! case t if isWildcardArg(t) => true @@ -206,30 +192,24 @@ object PatternMatcher { case Alternative(ps) => ps.forall(unapply) case EmptyTree => true case _ => false - } - } - private object VarArgPattern { - def unapply(pat: Tree): Option[Tree] = swapBind(pat) match { + private object VarArgPattern: + def unapply(pat: Tree): Option[Tree] = swapBind(pat) match case Typed(pat1, tpt) if tpt.tpe.isRepeatedParam => Some(pat1) case _ => None - } - } /** Rewrite (repeatedly) `x @ (p: T)` to `(x @ p): T` * This brings out the type tests to where they can be analyzed. 
*/ - private def swapBind(tree: Tree): Tree = tree match { + private def swapBind(tree: Tree): Tree = tree match case Bind(name, pat0) => - swapBind(pat0) match { + swapBind(pat0) match case Typed(pat, tpt) => Typed(cpy.Bind(tree)(name, pat), tpt) case _ => tree - } case _ => tree - } /** Plan for matching `scrutinee` symbol against `tree` pattern */ - private def patternPlan(scrutinee: Symbol, tree: Tree, onSuccess: Plan): Plan = { + private def patternPlan(scrutinee: Symbol, tree: Tree, onSuccess: Plan): Plan = extension (tree: Tree) def avoidPatBoundType(): Type = tree.tpe.widen match @@ -239,7 +219,7 @@ object PatternMatcher { tree.tpe /** Plan for matching `components` against argument patterns `args` */ - def matchArgsPlan(components: List[Tree], args: List[Tree], onSuccess: Plan): Plan = { + def matchArgsPlan(components: List[Tree], args: List[Tree], onSuccess: Plan): Plan = /* For a case with arguments that have some test on them such as * ``` * case Foo(1, 2) => someCode @@ -257,63 +237,56 @@ object PatternMatcher { * ``` */ def matchArgsComponentsPlan(components: List[Tree], syms: List[Symbol]): Plan = - components match { + components match case component :: components1 => letAbstract(component, component.avoidPatBoundType())(sym => matchArgsComponentsPlan(components1, sym :: syms)) case Nil => matchArgsPatternPlan(args, syms.reverse) - } def matchArgsPatternPlan(args: List[Tree], syms: List[Symbol]): Plan = - args match { + args match case arg :: args1 => val sym :: syms1 = syms: @unchecked patternPlan(sym, arg, matchArgsPatternPlan(args1, syms1)) case Nil => assert(syms.isEmpty) onSuccess - } matchArgsComponentsPlan(components, Nil) - } /** Plan for matching the sequence in `seqSym` against sequence elements `args`. * If `exact` is true, the sequence is not permitted to have any elements following `args`. 
*/ - def matchElemsPlan(seqSym: Symbol, args: List[Tree], exact: Boolean, onSuccess: Plan) = { + def matchElemsPlan(seqSym: Symbol, args: List[Tree], exact: Boolean, onSuccess: Plan) = val selectors = args.indices.toList.map(idx => ref(seqSym).select(defn.Seq_apply.matchingMember(seqSym.info)).appliedTo(Literal(Constant(idx)))) TestPlan(LengthTest(args.length, exact), seqSym, seqSym.span, matchArgsPlan(selectors, args, onSuccess)) - } /** Plan for matching the sequence in `getResult` against sequence elements * and a possible last varargs argument `args`. */ - def unapplySeqPlan(getResult: Symbol, args: List[Tree]): Plan = args.lastOption match { + def unapplySeqPlan(getResult: Symbol, args: List[Tree]): Plan = args.lastOption match case Some(VarArgPattern(arg)) => val matchRemaining = - if (args.length == 1) { + if (args.length == 1) val toSeq = ref(getResult) .select(defn.Seq_toSeq.matchingMember(getResult.info)) letAbstract(toSeq) { toSeqResult => patternPlan(toSeqResult, arg, onSuccess) } - } - else { + else val dropped = ref(getResult) .select(defn.Seq_drop.matchingMember(getResult.info)) .appliedTo(Literal(Constant(args.length - 1))) letAbstract(dropped) { droppedResult => patternPlan(droppedResult, arg, onSuccess) } - } matchElemsPlan(getResult, args.init, exact = false, matchRemaining) case _ => matchElemsPlan(getResult, args, exact = true, onSuccess) - } /** Plan for matching the sequence in `getResult` * * `getResult` is a product, where the last element is a sequence of elements. 
*/ - def unapplyProductSeqPlan(getResult: Symbol, args: List[Tree], arity: Int): Plan = { + def unapplyProductSeqPlan(getResult: Symbol, args: List[Tree], arity: Int): Plan = assert(arity <= args.size + 1) val selectors = productSelectors(getResult.info).map(ref(getResult).select(_)) @@ -322,10 +295,9 @@ object PatternMatcher { unapplySeqPlan(seqResult, args.drop(arity - 1)) } matchArgsPlan(selectors.take(arity - 1), args.take(arity - 1), matchSeq) - } /** Plan for matching the result of an unapply against argument patterns `args` */ - def unapplyPlan(unapp: Tree, args: List[Tree]): Plan = { + def unapplyPlan(unapp: Tree, args: List[Tree]): Plan = def caseClass = unapp.symbol.owner.linkedClass lazy val caseAccessors = caseClass.caseAccessors.filter(_.is(Method)) @@ -348,24 +320,21 @@ object PatternMatcher { else letAbstract(unapp) { unappResult => val isUnapplySeq = unapp.symbol.name == nme.unapplySeq - if (isProductMatch(unapp.tpe.widen, args.length) && !isUnapplySeq) { + if (isProductMatch(unapp.tpe.widen, args.length) && !isUnapplySeq) val selectors = productSelectors(unapp.tpe).take(args.length) .map(ref(unappResult).select(_)) matchArgsPlan(selectors, args, onSuccess) - } - else if (isUnapplySeq && isProductSeqMatch(unapp.tpe.widen, args.length, unapp.srcPos)) { + else if (isUnapplySeq && isProductSeqMatch(unapp.tpe.widen, args.length, unapp.srcPos)) val arity = productArity(unapp.tpe.widen, unapp.srcPos) unapplyProductSeqPlan(unappResult, args, arity) - } - else if (isUnapplySeq && unapplySeqTypeElemTp(unapp.tpe.widen.finalResultType).exists) { + else if (isUnapplySeq && unapplySeqTypeElemTp(unapp.tpe.widen.finalResultType).exists) unapplySeqPlan(unappResult, args) - } else if unappResult.info <:< defn.NonEmptyTupleTypeRef then val components = (0 until foldApplyTupleType(unappResult.denot.info).length).toList.map(tupleApp(_, ref(unappResult))) matchArgsPlan(components, args, onSuccess) - else { + else assert(isGetMatch(unapp.tpe)) - val argsPlan = { + val 
argsPlan = val get = ref(unappResult).select(nme.get, _.info.isParameterless) val arity = productArity(get.tpe, unapp.srcPos) if (isUnapplySeq) @@ -380,22 +349,18 @@ object PatternMatcher { else productSelectors(get.tpe).map(ref(getResult).select(_)) matchArgsPlan(selectors, args, onSuccess) } - } TestPlan(NonEmptyTest, unappResult, unapp.span, argsPlan) - } } - } // begin patternPlan - swapBind(tree) match { + swapBind(tree) match case Typed(pat, tpt) => - val isTrusted = pat match { + val isTrusted = pat match case UnApply(extractor, _, _) => extractor.symbol.is(Synthetic) && extractor.symbol.owner.linkedClass.is(Case) && !hasExplicitTypeArgs(extractor) case _ => false - } TestPlan(TypeTest(tpt, isTrusted), scrutinee, tree.span, letAbstract(ref(scrutinee).cast(tpt.tpe)) { casted => nonNull += casted @@ -406,8 +371,8 @@ object PatternMatcher { // Generate a throwaway but type-correct plan. // This plan will never execute because it'll be guarded by a `NonNullTest`. ResultPlan(tpd.Throw(tpd.nullLiteral)) - else { - def applyImplicits(acc: Tree, implicits: List[Tree], mt: Type): Tree = mt match { + else + def applyImplicits(acc: Tree, implicits: List[Tree], mt: Type): Tree = mt match case mt: MethodType => assert(mt.isImplicitMethod) val (args, rest) = implicits.splitAt(mt.paramNames.size) @@ -415,22 +380,19 @@ object PatternMatcher { case _ => assert(implicits.isEmpty) acc - } val mt @ MethodType(_) = extractor.tpe.widen: @unchecked val unapp0 = extractor.appliedTo(ref(scrutinee).ensureConforms(mt.paramInfos.head)) val unapp = applyImplicits(unapp0, implicits, mt.resultType) unapplyPlan(unapp, args) - } if (scrutinee.info.isNotNull || nonNull(scrutinee)) unappPlan else TestPlan(NonNullTest, scrutinee, tree.span, unappPlan) case Bind(name, body) => if (name == nme.WILDCARD) patternPlan(scrutinee, body, onSuccess) - else { + else // The type of `name` may refer to val in `body`, therefore should come after `body` val bound = tree.symbol.asTerm initializer(bound) = 
ref(scrutinee) patternPlan(scrutinee, body, LetPlan(bound, onSuccess)) - } case Alternative(alts) => altsLabeledAbstract { onf => SeqPlan( @@ -448,15 +410,12 @@ object PatternMatcher { matchElemsPlan(scrutinee, pats, exact = true, onSuccess) case _ => TestPlan(EqualTest(tree), scrutinee, tree.span, onSuccess) - } - } - private def caseDefPlan(scrutinee: Symbol, cdef: CaseDef): Plan = { + private def caseDefPlan(scrutinee: Symbol, cdef: CaseDef): Plan = var onSuccess: Plan = ResultPlan(cdef.body) if (!cdef.guard.isEmpty) onSuccess = TestPlan(GuardTest, cdef.guard, cdef.guard.span, onSuccess) patternPlan(scrutinee, cdef.pat, onSuccess) - } private def matchPlan(tree: Match): Plan = letAbstract(tree.selector) { scrutinee => @@ -469,91 +428,72 @@ object PatternMatcher { // ----- Optimizing plans --------------- /** A superclass for plan transforms */ - class PlanTransform extends (Plan => Plan) { - protected val treeMap: TreeMap = new TreeMap { + class PlanTransform extends (Plan => Plan): + protected val treeMap: TreeMap = new TreeMap: override def transform(tree: Tree)(using Context) = tree - } def apply(tree: Tree): Tree = treeMap.transform(tree) - def apply(plan: TestPlan): Plan = { + def apply(plan: TestPlan): Plan = plan.scrutinee = apply(plan.scrutinee) plan.onSuccess = apply(plan.onSuccess) plan - } - def apply(plan: LetPlan): Plan = { + def apply(plan: LetPlan): Plan = plan.body = apply(plan.body) initializer(plan.sym) = apply(initializer(plan.sym)) plan - } - def apply(plan: LabeledPlan): Plan = { + def apply(plan: LabeledPlan): Plan = plan.expr = apply(plan.expr) plan - } def apply(plan: ReturnPlan): Plan = plan - def apply(plan: SeqPlan): Plan = { + def apply(plan: SeqPlan): Plan = plan.head = apply(plan.head) plan.tail = apply(plan.tail) plan - } - def apply(plan: Plan): Plan = plan match { + def apply(plan: Plan): Plan = plan match case plan: TestPlan => apply(plan) case plan: LetPlan => apply(plan) case plan: LabeledPlan => apply(plan) case plan: 
ReturnPlan => apply(plan) case plan: SeqPlan => apply(plan) case plan: ResultPlan => plan - } - } - private class RefCounter extends PlanTransform { - val count = new mutable.HashMap[Symbol, Int] { + private class RefCounter extends PlanTransform: + val count = new mutable.HashMap[Symbol, Int]: override def default(key: Symbol) = 0 - } - } /** Reference counts for all labels */ - private def labelRefCount(plan: Plan): collection.Map[Symbol, Int] = { - object refCounter extends RefCounter { - override def apply(plan: LabeledPlan): Plan = { + private def labelRefCount(plan: Plan): collection.Map[Symbol, Int] = + object refCounter extends RefCounter: + override def apply(plan: LabeledPlan): Plan = apply(plan.expr) plan - } - override def apply(plan: ReturnPlan): Plan = { + override def apply(plan: ReturnPlan): Plan = count(plan.label) += 1 plan - } - } refCounter(plan) refCounter.count - } /** Reference counts for all variables */ - private def varRefCount(plan: Plan): collection.Map[Symbol, Int] = { - object refCounter extends RefCounter { - override val treeMap = new TreeMap { - override def transform(tree: Tree)(using Context) = tree match { + private def varRefCount(plan: Plan): collection.Map[Symbol, Int] = + object refCounter extends RefCounter: + override val treeMap = new TreeMap: + override def transform(tree: Tree)(using Context) = tree match case tree: Ident => if (isPatmatGenerated(tree.symbol)) count(tree.symbol) += 1 tree case _ => super.transform(tree) - } - } - override def apply(plan: LetPlan): Plan = { + override def apply(plan: LetPlan): Plan = apply(plan.body) if (count(plan.sym) != 0 || !isPatmatGenerated(plan.sym)) apply(initializer(plan.sym)) plan - } - override def apply(plan: SeqPlan): Plan = { + override def apply(plan: SeqPlan): Plan = apply(plan.head) if (canFallThrough(plan.head)) apply(plan.tail) plan - } - } refCounter(plan) refCounter.count - } /** Merge identical consecutive tests. 
* @@ -589,35 +529,30 @@ object PatternMatcher { * There are some tricks to "ignore" non-patmat-generated let bindings, i.e., * captures written in the source code, while identifying common subplans. */ - def mergeTests(plan: Plan): Plan = { - class SubstituteIdent(from: TermSymbol, to: TermSymbol) extends PlanTransform { - override val treeMap = new TreeMap { - override def transform(tree: Tree)(using Context) = tree match { + def mergeTests(plan: Plan): Plan = + class SubstituteIdent(from: TermSymbol, to: TermSymbol) extends PlanTransform: + override val treeMap = new TreeMap: + override def transform(tree: Tree)(using Context) = tree match case tree: Ident if tree.symbol == from => ref(to) case _ => super.transform(tree) - } - } - } - class MergeTests extends PlanTransform { - override def apply(plan: SeqPlan): Plan = { - def tryMerge(plan1: Plan, plan2: Plan): Option[Plan] = { - def skipNonPatmatGenedLets(plan: Plan): Plan = plan match { + class MergeTests extends PlanTransform: + override def apply(plan: SeqPlan): Plan = + def tryMerge(plan1: Plan, plan2: Plan): Option[Plan] = + def skipNonPatmatGenedLets(plan: Plan): Plan = plan match case LetPlan(sym, body) if !isPatmatGenerated(sym) => skipNonPatmatGenedLets(body) case _ => plan - } - def transferNonPatmatGenedLets(originalPlan: Plan, newPlan: Plan): Plan = originalPlan match { + def transferNonPatmatGenedLets(originalPlan: Plan, newPlan: Plan): Plan = originalPlan match case originalPlan: LetPlan if !isPatmatGenerated(originalPlan.sym) => originalPlan.body = transferNonPatmatGenedLets(originalPlan.body, newPlan) originalPlan case _ => newPlan - } - (skipNonPatmatGenedLets(plan1), skipNonPatmatGenedLets(plan2)) match { + (skipNonPatmatGenedLets(plan1), skipNonPatmatGenedLets(plan2)) match case (testPlan1: TestPlan, testPlan2: TestPlan) if testPlan1 == testPlan2 => /* Because testPlan2 is the same as testPlan1, it cannot possibly refer to * the symbols defined by any of the skipped lets. 
@@ -635,34 +570,26 @@ object PatternMatcher { case _ => None - } - } plan.head = apply(plan.head) plan.tail = apply(plan.tail) - plan.tail match { + plan.tail match case SeqPlan(tailHead, tailTail) => - tryMerge(plan.head, tailHead) match { + tryMerge(plan.head, tailHead) match case Some(merged) => SeqPlan(apply(merged), tailTail) case none => plan - } case tail => - tryMerge(plan.head, tail) match { + tryMerge(plan.head, tail) match case Some(merged) => apply(merged) case none => plan - } - } - } - } new MergeTests()(plan) - } /** Inline let-bound trees that are referenced only once and eliminate dead code. * * - Drop all variables that are not referenced anymore after inlining. * - Drop the `tail` of `SeqPlan`s whose `head` cannot fall through. */ - private def inlineVars(plan: Plan): Plan = { + private def inlineVars(plan: Plan): Plan = val refCount = varRefCount(plan) val LetPlan(topSym, _) = plan: @unchecked @@ -673,38 +600,31 @@ object PatternMatcher { else false - object Inliner extends PlanTransform { - override val treeMap = new TreeMap { - override def transform(tree: Tree)(using Context) = tree match { + object Inliner extends PlanTransform: + override val treeMap = new TreeMap: + override def transform(tree: Tree)(using Context) = tree match case tree: Ident => val sym = tree.symbol if (toDrop(sym)) transform(initializer(sym)) else tree case _ => super.transform(tree) - } - } override def apply(plan: LetPlan): Plan = if (toDrop(plan.sym)) apply(plan.body) - else { + else initializer(plan.sym) = apply(initializer(plan.sym)) plan.body = apply(plan.body) plan - } - override def apply(plan: SeqPlan): Plan = { + override def apply(plan: SeqPlan): Plan = val newHead = apply(plan.head) if (!canFallThrough(newHead)) // If the head cannot fall through, the tail is dead code newHead - else { + else plan.head = newHead plan.tail = apply(plan.tail) plan - } - } - } Inliner(plan) - } // ----- Generating trees from plans --------------- @@ -784,18 +704,17 @@ object 
PatternMatcher { end emitCondition @tailrec - private def canFallThrough(plan: Plan): Boolean = plan match { + private def canFallThrough(plan: Plan): Boolean = plan match case _:ReturnPlan | _:ResultPlan => false case _:TestPlan | _:LabeledPlan => true case LetPlan(_, body) => canFallThrough(body) case SeqPlan(_, tail) => canFallThrough(tail) - } /** Collect longest list of plans that represent possible cases of * a switch, including a last default case, by starting with this * plan and following onSuccess plans. */ - private def collectSwitchCases(scrutinee: Tree, plan: SeqPlan): List[(List[Tree], Plan)] = { + private def collectSwitchCases(scrutinee: Tree, plan: SeqPlan): List[(List[Tree], Plan)] = def isSwitchableType(tpe: Type): Boolean = (tpe isRef defn.IntClass) || (tpe isRef defn.ByteClass) || @@ -805,23 +724,22 @@ object PatternMatcher { val seen = mutable.Set[Any]() - def isNewSwitchableConst(tree: Tree) = tree match { + def isNewSwitchableConst(tree: Tree) = tree match case Literal(const) if (const.isIntRange || const.tag == Constants.StringTag) && !seen.contains(const.value) => seen += const.value true case _ => false - } // An extractor to recover the shape of plans that can become alternatives - object AlternativesPlan { + object AlternativesPlan: def unapply(plan: LabeledPlan): Option[(List[Tree], Plan)] = - plan.expr match { + plan.expr match case SeqPlan(LabeledPlan(innerLabel, innerPlan), ons) if !canFallThrough(ons) => val outerLabel = plan.sym val alts = List.newBuilder[Tree] - def rec(innerPlan: Plan): Boolean = innerPlan match { + def rec(innerPlan: Plan): Boolean = innerPlan match case SeqPlan(TestPlan(EqualTest(tree), scrut, _, ReturnPlan(`innerLabel`)), tail) if scrut === scrutinee && isNewSwitchableConst(tree) => alts += tree @@ -830,7 +748,6 @@ object PatternMatcher { true case _ => false - } if (rec(innerPlan)) Some((alts.result(), ons)) else @@ -838,10 +755,8 @@ object PatternMatcher { case _ => None - } - } - def recur(plan: Plan): 
List[(List[Tree], Plan)] = plan match { + def recur(plan: Plan): List[(List[Tree], Plan)] = plan match case SeqPlan(testPlan @ TestPlan(EqualTest(tree), scrut, _, ons), tail) if scrut === scrutinee && !canFallThrough(ons) && isNewSwitchableConst(tree) => (tree :: Nil, ons) :: recur(tail) @@ -849,23 +764,19 @@ object PatternMatcher { (alts, ons) :: recur(tail) case _ => (Nil, plan) :: Nil - } if (isSwitchableType(scrutinee.tpe.widen)) recur(plan) else Nil - } private def hasEnoughSwitchCases(cases: List[(List[Tree], Plan)], required: Int): Boolean = // 1 because of the default case - required <= 1 || { - cases match { + required <= 1 `||`: + cases match case (alts, _) :: cases1 => hasEnoughSwitchCases(cases1, required - alts.size) case _ => false - } - } /** Emit a switch-match */ - private def emitSwitchMatch(scrutinee: Tree, cases: List[(List[Tree], Plan)]): Match = { + private def emitSwitchMatch(scrutinee: Tree, cases: List[(List[Tree], Plan)]): Match = /* Make sure to adapt the scrutinee to Int or String, as well as all the * alternatives, so that only Matches on pritimive Ints or Strings survive * this phase. 
@@ -883,27 +794,24 @@ object PatternMatcher { else cpy.Literal(lit)(Constant(constant.intValue)) val caseDefs = cases.map { (alts, ons) => - val pat = alts match { + val pat = alts match case alt :: Nil => primLiteral(alt) case Nil => Underscore(scrutineeTpe) // default case case _ => Alternative(alts.map(primLiteral)) - } CaseDef(pat, EmptyTree, emit(ons)) } Match(primScrutinee, caseDefs) - } /** If selfCheck is `true`, used to check whether a tree gets generated twice */ private val emitted = mutable.Set[Int]() /** Translate plan to tree */ - private def emit(plan: Plan): Tree = { - if (selfCheck) { + private def emit(plan: Plan): Tree = + if (selfCheck) assert(plan.isInstanceOf[ReturnPlan] || !emitted.contains(plan.id), plan.id) emitted += plan.id - } - plan match { + plan match case plan: TestPlan => /** Merge nested `if`s that have the same `else` branch into a single `if`. * This optimization targets calls to label defs for case failure jumps to next case. @@ -925,9 +833,9 @@ object PatternMatcher { * else () * ``` */ - def emitWithMashedConditions(plans: List[TestPlan]): Tree = { + def emitWithMashedConditions(plans: List[TestPlan]): Tree = val plan = plans.head - plan.onSuccess match { + plan.onSuccess match case plan2: TestPlan => emitWithMashedConditions(plan2 :: plans) case _ => @@ -938,8 +846,6 @@ object PatternMatcher { else acc.select(nme.ZAND).appliedTo(emitCondWithPos(otherPlan)) } If(conditions, emit(plan.onSuccess), unitLiteral) - } - } emitWithMashedConditions(plan :: Nil) case LetPlan(sym, body) => val valDef = ValDef(sym, initializer(sym).ensureConforms(sym.info), inferred = true).withSpan(sym.span) @@ -950,43 +856,38 @@ object PatternMatcher { Return(Literal(Constant(())), ref(label)) case plan: SeqPlan => def default = seq(emit(plan.head) :: Nil, emit(plan.tail)) - def maybeEmitSwitch(scrutinee: Tree): Tree = { + def maybeEmitSwitch(scrutinee: Tree): Tree = val switchCases = collectSwitchCases(scrutinee, plan) if 
(hasEnoughSwitchCases(switchCases, MinSwitchCases)) // at least 3 cases + default emitSwitchMatch(scrutinee, switchCases) else default - } - plan.head match { + plan.head match case testPlan: TestPlan => maybeEmitSwitch(testPlan.scrutinee) case LabeledPlan(_, SeqPlan(LabeledPlan(_, SeqPlan(testPlan: TestPlan, _)), _)) => maybeEmitSwitch(testPlan.scrutinee) case _ => default - } case ResultPlan(tree) => if (tree.symbol == defn.throwMethod) tree // For example MatchError else Return(tree, ref(resultLabel)) - } - } /** Pretty-print plan; used for debugging */ - def show(plan: Plan): String = { + def show(plan: Plan): String = val lrefCount = labelRefCount(plan) val vrefCount = varRefCount(plan) val sb = new StringBuilder val seen = mutable.Set[Int]() - def showTest(test: Test) = test match { + def showTest(test: Test) = test match case EqualTest(tree) => i"EqualTest($tree)" case TypeTest(tpt, trusted) => i"TypeTest($tpt, trusted=$trusted)" case _ => test.toString - } def showPlan(plan: Plan): Unit = - if (!seen.contains(plan.id)) { + if (!seen.contains(plan.id)) seen += plan.id sb append s"\n${plan.id}: " - plan match { + plan match case TestPlan(test, scrutinee, _, ons) => sb.append(i"$scrutinee ? ${showTest(test)}(${ons.id})") showPlan(ons) @@ -1006,11 +907,8 @@ object PatternMatcher { showPlan(tail) case ResultPlan(tree) => sb.append(tree.show) - } - } showPlan(plan) sb.toString - } /** If match is switch annotated, check that it translates to a switch * with at least as many cases as the original match. @@ -1048,17 +946,13 @@ object PatternMatcher { ) /** Translate pattern match to sequence of tests. 
*/ - def translateMatch(tree: Match): Tree = { + def translateMatch(tree: Match): Tree = var plan = matchPlan(tree) patmatch.println(i"Plan for $tree: ${show(plan)}") if (!ctx.settings.YnoPatmatOpt.value) - for ((title, optimization) <- optimizations) { + for ((title, optimization) <- optimizations) plan = optimization(plan) patmatch.println(s"After $title: ${show(plan)}") - } val result = emit(plan) checkSwitch(tree, result) Labeled(resultLabel, result) - } - } -} diff --git a/compiler/src/dotty/tools/dotc/transform/PickleQuotes.scala b/compiler/src/dotty/tools/dotc/transform/PickleQuotes.scala index 8b58f18bca52..c1f6c0d73d81 100644 --- a/compiler/src/dotty/tools/dotc/transform/PickleQuotes.scala +++ b/compiler/src/dotty/tools/dotc/transform/PickleQuotes.scala @@ -68,7 +68,7 @@ import scala.collection.mutable * and then performs the same transformation on any quote contained in the `content`s. * */ -class PickleQuotes extends MacroTransform { +class PickleQuotes extends MacroTransform: import PickleQuotes._ import tpd._ @@ -89,7 +89,7 @@ class PickleQuotes extends MacroTransform { override def run(using Context): Unit = if (ctx.compilationUnit.needsStaging) super.run - protected def newTransformer(using Context): Transformer = new Transformer { + protected def newTransformer(using Context): Transformer = new Transformer: override def transform(tree: tpd.Tree)(using Context): tpd.Tree = tree match case Apply(Select(quote: Quote, nme.apply), List(quotes)) => @@ -101,7 +101,6 @@ class PickleQuotes extends MacroTransform { tree case _ => super.transform(tree) - } private def extractHolesContents(quote: tpd.Quote)(using Context): (List[Tree], tpd.Quote) = class HoleContentExtractor extends Transformer: @@ -127,15 +126,13 @@ class PickleQuotes extends MacroTransform { case _ => super.transform(tree).withType(mapAnnots(tree.tpe)) - private def mapAnnots = new TypeMap { // TODO factor out duplicated logic in Splicing - override def apply(tp: Type): Type = { + private def 
mapAnnots = new TypeMap: // TODO factor out duplicated logic in Splicing + override def apply(tp: Type): Type = tp match case tp @ AnnotatedType(underlying, annot) => val underlying1 = this(underlying) derivedAnnotatedType(tp, underlying1, annot.derivedAnnotation(transform(annot.tree))) case _ => mapOver(tp) - } - } /** Get the holeContents of the transformed tree */ def getContents() = @@ -170,12 +167,11 @@ class PickleQuotes extends MacroTransform { else val tdefs = quote.tags.zipWithIndex.map(mkTagSymbolAndAssignType) val typeMapping = quote.tags.map(_.tpe).zip(tdefs.map(_.symbol.typeRef)).toMap - val typeMap = new TypeMap { + val typeMap = new TypeMap: override def apply(tp: Type): Type = tp match case TypeRef(tag: TermRef, _) if tp.typeSymbol == defn.QuotedType_splice => typeMapping.getOrElse(tag, tp) case _ => mapOver(tp) - } def treeMap(tree: Tree): Tree = tree match case Select(qual, _) if tree.symbol == defn.QuotedType_splice => typeMapping.get(qual.tpe) match @@ -185,7 +181,7 @@ class PickleQuotes extends MacroTransform { val body1 = new TreeTypeMap(typeMap, treeMap).transform(quote.body) cpy.Quote(quote)(Block(tdefs, body1), quote.tags) - private def mkTagSymbolAndAssignType(typeArg: Tree, idx: Int)(using Context): TypeDef = { + private def mkTagSymbolAndAssignType(typeArg: Tree, idx: Int)(using Context): TypeDef = val holeType = getPicklableHoleType(typeArg.tpe.select(tpnme.Underlying), _ => false) val hole = untpd.cpy.Hole(typeArg)(isTerm = false, idx, Nil, EmptyTree).withType(holeType) val local = newSymbol( @@ -197,29 +193,26 @@ class PickleQuotes extends MacroTransform { ).asType local.addAnnotation(Annotation(defn.QuotedRuntime_SplicedTypeAnnot, typeArg.span)) ctx.typeAssigner.assignType(untpd.TypeDef(local.name, hole), local).withSpan(typeArg.span) - } /** Avoid all non-static types except those defined in the quote. 
*/ private def getPicklableHoleType(tpe: Type, isStagedClasses: Symbol => Boolean)(using Context) = new TypeOps.AvoidMap { def toAvoid(tp: NamedType) = !isStagedClasses(tp.typeSymbol) && !isStaticPrefix(tp) }.apply(tpe) -} -object PickleQuotes { +object PickleQuotes: import tpd._ val name: String = "pickleQuotes" val description: String = "turn quoted trees into explicit run-time data structures" - def pickle(quote: Quote, quotes: Tree, holeContents: List[Tree])(using Context) = { + def pickle(quote: Quote, quotes: Tree, holeContents: List[Tree])(using Context) = val body = quote.body val bodyType = quote.bodyType /** Helper methods to construct trees calling methods in `Quotes.reflect` based on the current `quotes` tree */ - object reflect extends ReifiedReflect { + object reflect extends ReifiedReflect: val quotesTree = quotes - } /** Encode quote using Reflection.Literal * @@ -231,7 +224,7 @@ object PickleQuotes { * ``` * this closure is always applied directly to the actual context and the BetaReduce phase removes it. */ - def pickleAsLiteral(lit: Literal) = { + def pickleAsLiteral(lit: Literal) = val typeName = body.tpe.typeSymbol.name val literalValue = if lit.const.tag == Constants.NullTag || lit.const.tag == Constants.UnitTag then Nil @@ -249,15 +242,12 @@ object PickleQuotes { case Constants.UnitTag => defn. Quotes_reflect_UnitConstant case Constants.NullTag => defn. Quotes_reflect_NullConstant case Constants.ClazzTag => defn. 
Quotes_reflect_ClassOfConstant - reflect.asExpr(body.tpe) { - reflect.Literal { + reflect.asExpr(body.tpe): + reflect.Literal: reflect.self .select(constModule) .select(nme.apply) .appliedToTermArgs(literalValue) - } - } - } /** Encode quote using Reflection.Literal * @@ -271,10 +261,10 @@ object PickleQuotes { val exprType = defn.QuotedExprClass.typeRef.appliedTo(body.tpe) ref(lifter).appliedToType(bodyType).select(nme.apply).appliedTo(lit).appliedTo(quotes) - def pickleAsValue(lit: Literal) = { + def pickleAsValue(lit: Literal) = // TODO should all constants be pickled as Literals? // Should examine the generated bytecode size to decide and performance - lit.const.tag match { + lit.const.tag match case Constants.NullTag => pickleAsLiteral(lit) case Constants.UnitTag => pickleAsLiteral(lit) case Constants.BooleanTag => liftedValue(lit, defn.ToExprModule_BooleanToExpr) @@ -286,8 +276,6 @@ object PickleQuotes { case Constants.DoubleTag => liftedValue(lit, defn.ToExprModule_DoubleToExpr) case Constants.CharTag => liftedValue(lit, defn.ToExprModule_CharToExpr) case Constants.StringTag => liftedValue(lit, defn.ToExprModule_StringToExpr) - } - } /** Encode quote using QuoteUnpickler.{unpickleExpr, unpickleType} * @@ -301,7 +289,7 @@ object PickleQuotes { * ``` * this closure is always applied directly to the actual context and the BetaReduce phase removes it. 
*/ - def pickleAsTasty() = { + def pickleAsTasty() = val body1 = if body.isType then body else Inlined(Inlines.inlineCallTrace(ctx.owner, quote.sourcePos), Nil, body) @@ -327,7 +315,7 @@ object PickleQuotes { args => val cases = holeContents.zipWithIndex.map { case (splice, idx) => val defn.FunctionOf(argTypes, defn.FunctionOf(quotesType :: _, _, _), _) = splice.tpe: @unchecked - val rhs = { + val rhs = val spliceArgs = argTypes.zipWithIndex.map { (argType, i) => args(1).select(nme.apply).appliedTo(Literal(Constant(i))).asInstance(argType) } @@ -337,7 +325,6 @@ object PickleQuotes { splice .select(nme.apply).appliedToArgs(spliceArgs)) .select(nme.apply).appliedTo(args(2).asInstance(quotesType)) - } CaseDef(Literal(Constant(idx)), EmptyTree, rhs) } cases match @@ -358,7 +345,6 @@ object PickleQuotes { .asInstance(defn.QuoteUnpicklerClass.typeRef) .select(unpickleMeth).appliedToType(bodyType) .appliedToArgs(unpickleArgs).withSpan(body.span) - } /** Encode quote using Reflection.TypeRepr.typeConstructorOf * @@ -371,11 +357,10 @@ object PickleQuotes { * this closure is always applied directly to the actual context and the BetaReduce phase removes it. 
*/ def taggedType() = - reflect.asType(body.tpe) { + reflect.asType(body.tpe): reflect.TypeRepr_typeConstructorOf( TypeApply(ref(defn.Predef_classOf.termRef), body :: Nil) ) - } def getLiteral(tree: tpd.Tree): Option[Literal] = tree match case tree: Literal => Some(tree) @@ -390,6 +375,4 @@ object PickleQuotes { getLiteral(body) match case Some(lit) => pickleAsValue(lit) case _ => pickleAsTasty() - } -} diff --git a/compiler/src/dotty/tools/dotc/transform/Pickler.scala b/compiler/src/dotty/tools/dotc/transform/Pickler.scala index f5fe34bafc2f..0017f4a07487 100644 --- a/compiler/src/dotty/tools/dotc/transform/Pickler.scala +++ b/compiler/src/dotty/tools/dotc/transform/Pickler.scala @@ -17,7 +17,7 @@ import collection.mutable import util.concurrent.{Executor, Future} import compiletime.uninitialized -object Pickler { +object Pickler: val name: String = "pickler" val description: String = "generates TASTy info" @@ -26,10 +26,9 @@ object Pickler { * only in backend. */ inline val ParallelPickling = true -} /** This phase pickles trees */ -class Pickler extends Phase { +class Pickler extends Phase: import ast.tpd._ override def phaseName: String = Pickler.name @@ -40,22 +39,20 @@ class Pickler extends Phase { override def isRunnable(using Context): Boolean = super.isRunnable && !ctx.settings.fromTasty.value - private def output(name: String, msg: String) = { + private def output(name: String, msg: String) = val s = new PrintStream(name) s.print(msg) s.close - } // Maps that keep a record if -Ytest-pickler is set. 
private val beforePickling = new mutable.HashMap[ClassSymbol, String] private val pickledBytes = new mutable.HashMap[ClassSymbol, Array[Byte]] /** Drop any elements of this list that are linked module classes of other elements in the list */ - private def dropCompanionModuleClasses(clss: List[ClassSymbol])(using Context): List[ClassSymbol] = { + private def dropCompanionModuleClasses(clss: List[ClassSymbol])(using Context): List[ClassSymbol] = val companionModuleClasses = clss.filterNot(_.is(Module)).map(_.linkedClass).filterNot(_.isAbsent()) clss.filterNot(companionModuleClasses.contains) - } /** Runs given functions with a scratch data block in a serialized fashion (i.e. * inside a synchronized block). Scratch data is re-used between calls. @@ -65,17 +62,16 @@ class Pickler extends Phase { object serialized: val scratch = new ScratchData def run(body: ScratchData => Array[Byte]): Array[Byte] = - synchronized { + synchronized: scratch.reset() body(scratch) - } private val executor = Executor[Array[Byte]]() private def useExecutor(using Context) = Pickler.ParallelPickling && !ctx.settings.YtestPickler.value - override def run(using Context): Unit = { + override def run(using Context): Unit = val unit = ctx.compilationUnit pickling.println(i"unpickling in run ${ctx.runId}") @@ -93,7 +89,7 @@ class Pickler extends Phase { val positionWarnings = new mutable.ListBuffer[Message]() def reportPositionWarnings() = positionWarnings.foreach(report.warning(_)) - def computePickled(): Array[Byte] = inContext(ctx.fresh) { + def computePickled(): Array[Byte] = inContext(ctx.fresh): serialized.run { scratch => treePkl.compactify(scratch) if tree.span.exists then @@ -111,9 +107,8 @@ class Pickler extends Phase { val pickled = pickler.assembleParts() def rawBytes = // not needed right now, but useful to print raw format. 
- pickled.iterator.grouped(10).toList.zipWithIndex.map { + pickled.iterator.grouped(10).toList.zipWithIndex.map: case (row, i) => s"${i}0: ${row.mkString(" ")}" - } // println(i"rawBytes = \n$rawBytes%\n%") // DEBUG if pickling ne noPrinter then @@ -121,7 +116,6 @@ class Pickler extends Phase { println(TastyPrinter.showContents(pickled, ctx.settings.color.value == "never")) pickled } - } /** A function that returns the pickled bytes. Depending on `Pickler.ParallelPickling` * either computes the pickled data in a future or eagerly before constructing the @@ -141,9 +135,8 @@ class Pickler extends Phase { unit.pickled += (cls -> demandPickled) end for - } - override def runOn(units: List[CompilationUnit])(using Context): List[CompilationUnit] = { + override def runOn(units: List[CompilationUnit])(using Context): List[CompilationUnit] = val result = if useExecutor then executor.start() @@ -160,23 +153,19 @@ class Pickler extends Phase { .addMode(Mode.ReadPositions) .addMode(Mode.PrintShowExceptions)) result - } - private def testUnpickler(using Context): Unit = { + private def testUnpickler(using Context): Unit = pickling.println(i"testing unpickler at run ${ctx.runId}") ctx.initialize() val unpicklers = - for ((cls, bytes) <- pickledBytes) yield { + for ((cls, bytes) <- pickledBytes) yield val unpickler = new DottyUnpickler(bytes) unpickler.enter(roots = Set.empty) cls -> unpickler - } pickling.println("************* entered toplevel ***********") - for ((cls, unpickler) <- unpicklers) { + for ((cls, unpickler) <- unpicklers) val unpickled = unpickler.rootTrees testSame(i"$unpickled%\n%", beforePickling(cls), cls) - } - } private def testSame(unpickled: String, previous: String, cls: ClassSymbol)(using Context) = import java.nio.charset.StandardCharsets.UTF_8 @@ -190,4 +179,3 @@ class Pickler extends Phase { | | diff before-pickling.txt after-pickling.txt""") end testSame -} diff --git a/compiler/src/dotty/tools/dotc/transform/PostTyper.scala 
b/compiler/src/dotty/tools/dotc/transform/PostTyper.scala index ac3dc15092a0..f6a9bbcba6ff 100644 --- a/compiler/src/dotty/tools/dotc/transform/PostTyper.scala +++ b/compiler/src/dotty/tools/dotc/transform/PostTyper.scala @@ -20,10 +20,9 @@ import util.SrcPos import reporting._ import NameKinds.WildcardParamName -object PostTyper { +object PostTyper: val name: String = "posttyper" val description: String = "additional checks and cleanups after type checking" -} /** A macro transform that runs immediately after typer and that performs the following functions: * @@ -67,11 +66,10 @@ class PostTyper extends MacroTransform with IdentityDenotTransformer { thisPhase override def description: String = PostTyper.description - override def checkPostCondition(tree: tpd.Tree)(using Context): Unit = tree match { + override def checkPostCondition(tree: tpd.Tree)(using Context): Unit = tree match case tree: ValOrDefDef => assert(!tree.symbol.signature.isUnderDefined) case _ => - } override def changesMembers: Boolean = true // the phase adds super accessors and synthetic members @@ -84,26 +82,24 @@ class PostTyper extends MacroTransform with IdentityDenotTransformer { thisPhase val synthMbr: SyntheticMembers = new SyntheticMembers(thisPhase) val beanProps: BeanProperties = new BeanProperties(thisPhase) - private def newPart(tree: Tree): Option[New] = methPart(tree) match { + private def newPart(tree: Tree): Option[New] = methPart(tree) match case Select(nu: New, _) => Some(nu) case _ => None - } private def checkValidJavaAnnotation(annot: Tree)(using Context): Unit = { // TODO fill in } - class PostTyperTransformer extends Transformer { + class PostTyperTransformer extends Transformer: private var inJavaAnnot: Boolean = false private var noCheckNews: Set[New] = Set() - def withNoCheckNews[T](ts: List[New])(op: => T): T = { + def withNoCheckNews[T](ts: List[New])(op: => T): T = val saved = noCheckNews noCheckNews ++= ts try op finally noCheckNews = saved - } def isCheckable(t: 
New): Boolean = !inJavaAnnot && !noCheckNews.contains(t) @@ -131,24 +127,22 @@ class PostTyper extends MacroTransform with IdentityDenotTransformer { thisPhase case _ => case _ => - private def transformAnnot(annot: Tree)(using Context): Tree = { + private def transformAnnot(annot: Tree)(using Context): Tree = val saved = inJavaAnnot inJavaAnnot = annot.symbol.is(JavaDefined) if (inJavaAnnot) checkValidJavaAnnotation(annot) try transform(annot) finally inJavaAnnot = saved - } private def transformAnnot(annot: Annotation)(using Context): Annotation = annot.derivedAnnotation(transformAnnot(annot.tree)) - private def processMemberDef(tree: Tree)(using Context): tree.type = { + private def processMemberDef(tree: Tree)(using Context): tree.type = val sym = tree.symbol Checking.checkValidOperator(sym) sym.transformAnnotations(transformAnnot) sym.defTree = tree tree - } private def processValOrDefDef(tree: Tree)(using Context): tree.type = val sym = tree.symbol @@ -193,65 +187,55 @@ class PostTyper extends MacroTransform with IdentityDenotTransformer { thisPhase case _ => - private def transformSelect(tree: Select, targs: List[Tree])(using Context): Tree = { + private def transformSelect(tree: Select, targs: List[Tree])(using Context): Tree = val qual = tree.qualifier - qual.symbol.moduleClass.denot match { + qual.symbol.moduleClass.denot match case pkg: PackageClassDenotation => val pobj = pkg.packageObjFor(tree.symbol) if (pobj.exists) return transformSelect(cpy.Select(tree)(qual.select(pobj).withSpan(qual.span), tree.name), targs) case _ => - } val tree1 = super.transform(tree) - constToLiteral(tree1) match { + constToLiteral(tree1) match case _: Literal => tree1 case _ => superAcc.transformSelect(tree1, targs) - } - } - private def normalizeTypeArgs(tree: TypeApply)(using Context): TypeApply = tree.tpe match { + private def normalizeTypeArgs(tree: TypeApply)(using Context): TypeApply = tree.tpe match case pt: PolyType => // wait for more arguments coming tree case _ 
=> - def decompose(tree: TypeApply): (Tree, List[Tree]) = tree.fun match { + def decompose(tree: TypeApply): (Tree, List[Tree]) = tree.fun match case fun: TypeApply => val (tycon, args) = decompose(fun) (tycon, args ++ tree.args) case _ => (tree.fun, tree.args) - } - def reorderArgs(pnames: List[Name], namedArgs: List[NamedArg], otherArgs: List[Tree]): List[Tree] = pnames match { + def reorderArgs(pnames: List[Name], namedArgs: List[NamedArg], otherArgs: List[Tree]): List[Tree] = pnames match case pname :: pnames1 => - namedArgs.partition(_.name == pname) match { + namedArgs.partition(_.name == pname) match case (NamedArg(_, arg) :: _, namedArgs1) => arg :: reorderArgs(pnames1, namedArgs1, otherArgs) case _ => val otherArg :: otherArgs1 = otherArgs: @unchecked otherArg :: reorderArgs(pnames1, namedArgs, otherArgs1) - } case nil => assert(namedArgs.isEmpty && otherArgs.isEmpty) Nil - } val (tycon, args) = decompose(tree) - tycon.tpe.widen match { + tycon.tpe.widen match case tp: PolyType if args.exists(isNamedArg) => val (namedArgs, otherArgs) = args.partition(isNamedArg) val args1 = reorderArgs(tp.paramNames, namedArgs.asInstanceOf[List[NamedArg]], otherArgs) TypeApply(tycon, args1).withSpan(tree.span).withType(tree.tpe) case _ => tree - } - } - private object dropInlines extends TreeMap { - override def transform(tree: Tree)(using Context): Tree = tree match { + private object dropInlines extends TreeMap: + override def transform(tree: Tree)(using Context): Tree = tree match case Inlined(call, _, expansion) => val newExpansion = PruneErasedDefs.trivialErasedTree(tree) cpy.Inlined(tree)(call, Nil, newExpansion) case _ => super.transform(tree) - } - } def checkNoConstructorProxy(tree: Tree)(using Context): Unit = if tree.symbol.is(ConstructorProxy) then @@ -270,7 +254,7 @@ class PostTyper extends MacroTransform with IdentityDenotTransformer { thisPhase else errorTree(tree, em"${tree.symbol} cannot be used as a type") override def transform(tree: Tree)(using 
Context): Tree = - try tree match { + try tree match // TODO move CaseDef case lower: keep most probable trees first for performance case CaseDef(pat, _, _) => val gadtCtx = @@ -285,10 +269,9 @@ class PostTyper extends MacroTransform with IdentityDenotTransformer { thisPhase else checkNoConstructorProxy(tree) registerNeedsInlining(tree) - tree.tpe match { + tree.tpe match case tpe: ThisType => This(tpe.cls).withSpan(tree.span) case _ => tree - } case tree @ Select(qual, name) => registerNeedsInlining(tree) if name.isTypeName then @@ -349,14 +332,13 @@ class PostTyper extends MacroTransform with IdentityDenotTransformer { thisPhase if (fn.symbol != defn.ChildAnnot.primaryConstructor) // Make an exception for ChildAnnot, which should really have AnyKind bounds Checking.checkBounds(args, fn.tpe.widen.asInstanceOf[PolyType]) - fn match { + fn match case sel: Select => val args1 = transform(args) val sel1 = transformSelect(sel, args1) cpy.TypeApply(tree1)(sel1, args1) case _ => super.transform(tree1) - } case Inlined(call, bindings, expansion) if !call.isEmpty => val pos = call.sourcePos CrossVersionChecks.checkExperimentalRef(call.symbol, pos) @@ -364,14 +346,13 @@ class PostTyper extends MacroTransform with IdentityDenotTransformer { thisPhase val callTrace = Inlines.inlineCallTrace(call.symbol, pos)(using ctx.withSource(pos.source)) cpy.Inlined(tree)(callTrace, transformSub(bindings), transform(expansion)(using inlineContext(call))) case templ: Template => - withNoCheckNews(templ.parents.flatMap(newPart)) { + withNoCheckNews(templ.parents.flatMap(newPart)): forwardParamAccessors(templ) synthMbr.addSyntheticMembers( beanProps.addBeanMethods( superAcc.wrapTemplate(templ)( super.transform(_).asInstanceOf[Template])) ) - } case tree: ValDef => registerIfHasMacroAnnotations(tree) checkErasedDef(tree) @@ -488,12 +469,10 @@ class PostTyper extends MacroTransform with IdentityDenotTransformer { thisPhase super.transform(tree) case tree => super.transform(tree) - } - catch { 
+ catch case ex : AssertionError => println(i"error while transforming $tree") throw ex - } override def transformStats[T](trees: List[Tree], exprOwner: Symbol, wrapResult: List[Tree] => Context ?=> T)(using Context): T = try super.transformStats(trees, exprOwner, wrapResult) @@ -532,5 +511,4 @@ class PostTyper extends MacroTransform with IdentityDenotTransformer { thisPhase sym.addAnnotation(Annotation(defn.ExperimentalAnnot, sym.span)) sym.companionModule.addAnnotation(Annotation(defn.ExperimentalAnnot, sym.span)) - } } diff --git a/compiler/src/dotty/tools/dotc/transform/ProtectedAccessors.scala b/compiler/src/dotty/tools/dotc/transform/ProtectedAccessors.scala index 6d8f7bdb32cb..1cf12af57eba 100644 --- a/compiler/src/dotty/tools/dotc/transform/ProtectedAccessors.scala +++ b/compiler/src/dotty/tools/dotc/transform/ProtectedAccessors.scala @@ -16,7 +16,7 @@ import dotty.tools.dotc.util.Property * from the point of access, but is accessible if the access is from an enclosing * class. In this point a public access method is placed in that enclosing class. */ -object ProtectedAccessors { +object ProtectedAccessors: val name: String = "protectedAccessors" val description: String = "add accessors for protected members" @@ -25,13 +25,12 @@ object ProtectedAccessors { if (sym.is(JavaDefined)) sym.is(JavaStatic) || // Java's static protected definitions are treated as public ctx.owner.enclosingPackageClass == sym.enclosingPackageClass - else { + else // For Scala-defined symbols we currently allow private and protected accesses // from inner packages, and compensate by widening accessibility of such symbols to public. // It would be good if we could revisit this at some point. val boundary = sym.accessBoundary(sym.enclosingPackageClass) ctx.owner.isContainedIn(boundary) || ctx.owner.isContainedIn(boundary.linkedClass) - } /** Do we need a protected accessor if the current context's owner * is not in a subclass or subtrait of `sym`? 
@@ -45,9 +44,8 @@ object ProtectedAccessors { def needsAccessor(sym: Symbol)(using Context): Boolean = needsAccessorIfNotInSubclass(sym) && !ctx.owner.enclosingClass.derivesFrom(sym.owner) -} -class ProtectedAccessors extends MiniPhase { +class ProtectedAccessors extends MiniPhase: import ast.tpd._ override def phaseName: String = ProtectedAccessors.name @@ -62,20 +60,17 @@ class ProtectedAccessors extends MiniPhase { override def prepareForUnit(tree: Tree)(using Context): Context = ctx.fresh.setProperty(AccessorsKey, new Accessors) - private class Accessors extends AccessProxies { - val insert: Insert = new Insert { + private class Accessors extends AccessProxies: + val insert: Insert = new Insert: def accessorNameOf(name: TermName, site: Symbol)(using Context): TermName = ProtectedAccessorName(name) def needsAccessor(sym: Symbol)(using Context) = ProtectedAccessors.needsAccessor(sym) - override def ifNoHost(reference: RefTree)(using Context): Tree = { + override def ifNoHost(reference: RefTree)(using Context): Tree = val curCls = ctx.owner.enclosingClass transforms.println(i"${curCls.ownersIterator.toList}%, %") report.error(em"illegal access to protected ${reference.symbol.showLocated} from $curCls", reference.srcPos) reference - } - } - } override def transformIdent(tree: Ident)(using Context): Tree = accessors.insert.accessorIfNeeded(tree) @@ -84,14 +79,12 @@ class ProtectedAccessors extends MiniPhase { accessors.insert.accessorIfNeeded(tree) override def transformAssign(tree: Assign)(using Context): Tree = - tree.lhs match { + tree.lhs match case lhs: RefTree if lhs.name.is(ProtectedAccessorName) => cpy.Apply(tree)(accessors.insert.useSetter(lhs), tree.rhs :: Nil) case _ => tree - } override def transformTemplate(tree: Template)(using Context): Tree = cpy.Template(tree)(body = accessors.addAccessorDefs(tree.symbol.owner, tree.body)) -} diff --git a/compiler/src/dotty/tools/dotc/transform/PruneErasedDefs.scala 
b/compiler/src/dotty/tools/dotc/transform/PruneErasedDefs.scala index 17f2d11ccfec..0633e22f28f4 100644 --- a/compiler/src/dotty/tools/dotc/transform/PruneErasedDefs.scala +++ b/compiler/src/dotty/tools/dotc/transform/PruneErasedDefs.scala @@ -65,7 +65,7 @@ class PruneErasedDefs extends MiniPhase with SymTransformer { thisTransform => Feature.checkExperimentalFeature("erased", sym.sourcePos) } -object PruneErasedDefs { +object PruneErasedDefs: import tpd._ val name: String = "pruneErasedDefs" @@ -73,4 +73,3 @@ object PruneErasedDefs { def trivialErasedTree(tree: Tree)(using Context): Tree = ref(defn.Compiletime_erasedValue).appliedToType(tree.tpe).withSpan(tree.span) -} diff --git a/compiler/src/dotty/tools/dotc/transform/PureStats.scala b/compiler/src/dotty/tools/dotc/transform/PureStats.scala index b747d7d6b9e4..c01bd6e21b1a 100644 --- a/compiler/src/dotty/tools/dotc/transform/PureStats.scala +++ b/compiler/src/dotty/tools/dotc/transform/PureStats.scala @@ -7,13 +7,12 @@ import MegaPhase._ import Types._, Contexts._, Flags._, DenotTransformers._ import Symbols._, StdNames._, Trees._ -object PureStats { +object PureStats: val name: String = "pureStats" val description: String = "remove pure statements in blocks" -} /** Remove pure statements in blocks */ -class PureStats extends MiniPhase { +class PureStats extends MiniPhase: import tpd._ @@ -24,12 +23,10 @@ class PureStats extends MiniPhase { override def runsAfter: Set[String] = Set(Erasure.name) override def transformBlock(tree: Block)(using Context): Tree = - val stats = tree.stats.mapConserve { + val stats = tree.stats.mapConserve: case Typed(Block(stats, expr), _) if isPureExpr(expr) => Thicket(stats) case stat if !stat.symbol.isConstructor && isPureExpr(stat) => EmptyTree case stat => stat - } if stats eq tree.stats then tree else cpy.Block(tree)(Trees.flatten(stats), tree.expr) -} diff --git a/compiler/src/dotty/tools/dotc/transform/Recheck.scala b/compiler/src/dotty/tools/dotc/transform/Recheck.scala 
index 527c73d02250..0eda551aad49 100644 --- a/compiler/src/dotty/tools/dotc/transform/Recheck.scala +++ b/compiler/src/dotty/tools/dotc/transform/Recheck.scala @@ -146,9 +146,8 @@ abstract class Recheck extends Phase, SymTransformer: /** If true, remember types of all tree nodes in attachments so that they * can be retrieved with `knownType` */ - private val keepAllTypes = inContext(ictx) { + private val keepAllTypes = inContext(ictx): ictx.settings.Xprint.value.containsPhase(thisPhase) - } /** Should type of `tree` be kept in an attachment so that it can be retrieved with * `knownType`? By default true only is `keepAllTypes` hold, but can be overridden. @@ -416,11 +415,10 @@ abstract class Recheck extends Phase, SymTransformer: traverse(stats) def recheckDef(tree: ValOrDefDef, sym: Symbol)(using Context): Unit = - inContext(ctx.localContext(tree, sym)) { + inContext(ctx.localContext(tree, sym)): tree match case tree: ValDef => recheckValDef(tree, sym) case tree: DefDef => recheckDefDef(tree, sym) - } /** Recheck tree without adapting it, returning its new type. * @param tree the original tree @@ -490,12 +488,11 @@ abstract class Recheck extends Phase, SymTransformer: tpe def recheck(tree: Tree, pt: Type = WildcardType)(using Context): Type = - trace(i"rechecking $tree with pt = $pt", recheckr, show = true) { + trace(i"rechecking $tree with pt = $pt", recheckr, show = true): try recheckFinish(recheckStart(tree, pt), tree, pt) catch case ex: Exception => println(i"error while rechecking $tree") throw ex - } /** Typing and previous transforms sometiems leaves skolem types in prefixes of * NamedTypes in `expected` that do not match the `actual` Type. 
-Ycheck does @@ -559,9 +556,8 @@ abstract class Recheck extends Phase, SymTransformer: /** Show tree with rechecked types instead of the types stored in the `.tpe` field */ override def show(tree: untpd.Tree)(using Context): String = - atPhase(thisPhase) { + atPhase(thisPhase): super.show(addRecheckedTypes.transform(tree.asInstanceOf[tpd.Tree])) - } end Recheck /** A class that can be used to test basic rechecking without any customaization */ diff --git a/compiler/src/dotty/tools/dotc/transform/RepeatableAnnotations.scala b/compiler/src/dotty/tools/dotc/transform/RepeatableAnnotations.scala index d6c11fe36748..5aa42f94217a 100644 --- a/compiler/src/dotty/tools/dotc/transform/RepeatableAnnotations.scala +++ b/compiler/src/dotty/tools/dotc/transform/RepeatableAnnotations.scala @@ -55,10 +55,9 @@ class RepeatableAnnotations extends MiniPhase: private def stableGroupBy[A, K](ins: Seq[A], f: A => K): scala.collection.MapView[K, List[A]] = val out = new mutable.LinkedHashMap[K, mutable.ListBuffer[A]]() - for (in <- ins) { + for (in <- ins) val buffer = out.getOrElseUpdate(f(in), new mutable.ListBuffer) buffer += in - } out.view.mapValues(_.toList) object RepeatableAnnotations: diff --git a/compiler/src/dotty/tools/dotc/transform/ResolveSuper.scala b/compiler/src/dotty/tools/dotc/transform/ResolveSuper.scala index 99b6be1eea8a..b42d102e08a2 100644 --- a/compiler/src/dotty/tools/dotc/transform/ResolveSuper.scala +++ b/compiler/src/dotty/tools/dotc/transform/ResolveSuper.scala @@ -42,7 +42,7 @@ class ResolveSuper extends MiniPhase with IdentityDenotTransformer { thisPhase = override def changesMembers: Boolean = true // the phase adds super accessors - override def transformTemplate(impl: Template)(using Context): Template = { + override def transformTemplate(impl: Template)(using Context): Template = val cls = impl.symbol.owner.asClass val ops = new MixinOps(cls, thisPhase) import ops._ @@ -58,22 +58,19 @@ class ResolveSuper extends MiniPhase with IdentityDenotTransformer 
{ thisPhase = val overrides = mixins.flatMap(superAccessors) cpy.Template(impl)(body = overrides ::: impl.body) - } - override def transformDefDef(ddef: DefDef)(using Context): Tree = { + override def transformDefDef(ddef: DefDef)(using Context): Tree = val meth = ddef.symbol.asTerm - if (meth.isSuperAccessor && !meth.is(Deferred)) { + if (meth.isSuperAccessor && !meth.is(Deferred)) assert(ddef.rhs.isEmpty, ddef.symbol) val cls = meth.owner.asClass val ops = new MixinOps(cls, thisPhase) import ops._ DefDef(meth, forwarderRhsFn(rebindSuper(cls, meth))) - } else ddef - } } -object ResolveSuper { +object ResolveSuper: val name: String = "resolveSuper" val description: String = "implement super accessors" @@ -82,7 +79,7 @@ object ResolveSuper { * @param base The class in which everything is mixed together * @param acc The symbol statically referred to by the superaccessor in the trait */ - def rebindSuper(base: Symbol, acc: Symbol)(using Context): Symbol = { + def rebindSuper(base: Symbol, acc: Symbol)(using Context): Symbol = var bcs = base.info.baseClasses.dropWhile(acc.owner != _).tail var sym: Symbol = NoSymbol @@ -100,7 +97,7 @@ object ResolveSuper { report.debuglog(i"starting rebindsuper from $base of ${acc.showLocated}: ${acc.info} in $bcs, name = $memberName") - while (bcs.nonEmpty && sym == NoSymbol) { + while (bcs.nonEmpty && sym == NoSymbol) val other = bcs.head.info.nonPrivateDecl(memberName) .filterWithPredicate(denot => mix.isEmpty || denot.symbol.owner.name == mix) .matchingDenotation(base.thisType, base.thisType.memberInfo(acc), targetName) @@ -118,14 +115,10 @@ object ResolveSuper { if !otherTp.overrides(accTp, relaxedOverriding, matchLoosely = true) then report.error(IllegalSuperAccessor(base, memberName, targetName, acc, accTp, other.symbol, otherTp), base.srcPos) bcs = bcs.tail - } if sym.is(Accessor) then report.error( em"parent ${acc.owner} has a super call which binds to the value ${sym.showFullName}. 
Super calls can only target methods.", base) - sym.orElse { + sym.orElse: val originalName = acc.name.asTermName.originalOfSuperAccessorName report.error(em"Member method ${originalName.debugString} of mixin ${acc.owner} is missing a concrete super implementation in $base.", base.srcPos) acc - } - } -} diff --git a/compiler/src/dotty/tools/dotc/transform/RestoreScopes.scala b/compiler/src/dotty/tools/dotc/transform/RestoreScopes.scala index d01be0419a4d..26af1c626d9c 100644 --- a/compiler/src/dotty/tools/dotc/transform/RestoreScopes.scala +++ b/compiler/src/dotty/tools/dotc/transform/RestoreScopes.scala @@ -26,12 +26,11 @@ class RestoreScopes extends MiniPhase with IdentityDenotTransformer { thisPhase * enclosing package definitions. So by the time RestoreScopes gets to * see a typedef or template, it still might be changed by DropEmptyConstructors. */ - override def transformPackageDef(pdef: PackageDef)(using Context): PackageDef = { + override def transformPackageDef(pdef: PackageDef)(using Context): PackageDef = pdef.stats.foreach(restoreScope) pdef - } - private def restoreScope(tree: Tree)(using Context) = tree match { + private def restoreScope(tree: Tree)(using Context) = tree match case TypeDef(_, impl: Template) => val restoredDecls = newScope for (stat <- impl.constr :: impl.body) @@ -48,7 +47,6 @@ class RestoreScopes extends MiniPhase with IdentityDenotTransformer { thisPhase decls = restoredDecls: Scope)).installAfter(thisPhase) tree case tree => tree - } } object RestoreScopes: diff --git a/compiler/src/dotty/tools/dotc/transform/SelectStatic.scala b/compiler/src/dotty/tools/dotc/transform/SelectStatic.scala index 1df9809c2f62..20fc3e052201 100644 --- a/compiler/src/dotty/tools/dotc/transform/SelectStatic.scala +++ b/compiler/src/dotty/tools/dotc/transform/SelectStatic.scala @@ -42,14 +42,14 @@ import dotty.tools.dotc.transform.SymUtils._ * * @author Dmytro Petrashko */ -class SelectStatic extends MiniPhase with IdentityDenotTransformer { +class 
SelectStatic extends MiniPhase with IdentityDenotTransformer: import ast.tpd._ override def phaseName: String = SelectStatic.name override def description: String = SelectStatic.description - override def transformSelect(tree: tpd.Select)(using Context): tpd.Tree = { + override def transformSelect(tree: tpd.Select)(using Context): tpd.Tree = val sym = tree.symbol def isStaticMember = (sym is Flags.Module) && sym.initial.maybeOwner.initial.isStaticOwner || @@ -63,18 +63,16 @@ class SelectStatic extends MiniPhase with IdentityDenotTransformer { else tree normalize(tree1) - } - private def isStaticOwnerRef(tree: Tree)(using Context): Boolean = tree match { + private def isStaticOwnerRef(tree: Tree)(using Context): Boolean = tree match case Ident(_) => tree.symbol.is(Module) && tree.symbol.moduleClass.isStaticOwner case Select(qual, _) => isStaticOwnerRef(qual) && tree.symbol.is(Module) && tree.symbol.moduleClass.isStaticOwner case _ => false - } - private def normalize(t: Tree)(using Context) = t match { + private def normalize(t: Tree)(using Context) = t match case Select(Block(stats, qual), nm) => Block(stats, cpy.Select(t)(qual, nm)) case Apply(Block(stats, qual), nm) => @@ -84,7 +82,6 @@ class SelectStatic extends MiniPhase with IdentityDenotTransformer { case Closure(env, Block(stats, qual), tpt) => Block(stats, Closure(env, qual, tpt)) case _ => t - } override def transformApply(tree: tpd.Apply)(using Context): tpd.Tree = normalize(tree) @@ -94,7 +91,6 @@ class SelectStatic extends MiniPhase with IdentityDenotTransformer { override def transformClosure(tree: tpd.Closure)(using Context): tpd.Tree = normalize(tree) -} object SelectStatic: val name: String = "selectStatic" diff --git a/compiler/src/dotty/tools/dotc/transform/SeqLiterals.scala b/compiler/src/dotty/tools/dotc/transform/SeqLiterals.scala index 2f586104c4e3..1a11f2773cec 100644 --- a/compiler/src/dotty/tools/dotc/transform/SeqLiterals.scala +++ 
b/compiler/src/dotty/tools/dotc/transform/SeqLiterals.scala @@ -14,7 +14,7 @@ import Contexts._ * is called directly. The reason for this step is that JavaSeqLiterals, being arrays * keep a precise type after erasure, whereas SeqLiterals only get the erased type `Seq`, */ -class SeqLiterals extends MiniPhase { +class SeqLiterals extends MiniPhase: import ast.tpd._ override def phaseName: String = SeqLiterals.name @@ -23,20 +23,17 @@ class SeqLiterals extends MiniPhase { override def runsAfter: Set[String] = Set(PatternMatcher.name) - override def checkPostCondition(tree: Tree)(using Context): Unit = tree match { + override def checkPostCondition(tree: Tree)(using Context): Unit = tree match case tpd: SeqLiteral => assert(tpd.isInstanceOf[JavaSeqLiteral]) case _ => - } - override def transformSeqLiteral(tree: SeqLiteral)(using Context): Tree = tree match { + override def transformSeqLiteral(tree: SeqLiteral)(using Context): Tree = tree match case tree: JavaSeqLiteral => tree case _ => val arr = JavaSeqLiteral(tree.elems, tree.elemtpt) //println(i"trans seq $tree, arr = $arr: ${arr.tpe} ${arr.tpe.elemType}") val elemtp = tree.elemtpt.tpe wrapArray(arr, elemtp).withSpan(tree.span).ensureConforms(tree.tpe) - } -} object SeqLiterals: val name: String = "seqLiterals" diff --git a/compiler/src/dotty/tools/dotc/transform/SetRootTree.scala b/compiler/src/dotty/tools/dotc/transform/SetRootTree.scala index d17dbbecc555..32e9d7015888 100644 --- a/compiler/src/dotty/tools/dotc/transform/SetRootTree.scala +++ b/compiler/src/dotty/tools/dotc/transform/SetRootTree.scala @@ -5,7 +5,7 @@ import dotty.tools.dotc.core.Contexts._ import dotty.tools.dotc.core.Phases.Phase /** Set the `rootTreeOrProvider` property of class symbols. */ -class SetRootTree extends Phase { +class SetRootTree extends Phase: override val phaseName: String = SetRootTree.name @@ -17,33 +17,26 @@ class SetRootTree extends Phase { // Check no needed. 
Does not transform trees override def isCheckable: Boolean = false - override def run(using Context): Unit = { + override def run(using Context): Unit = val tree = ctx.compilationUnit.tpdTree traverser.traverse(tree) - } - private def traverser = new tpd.TreeTraverser { + private def traverser = new tpd.TreeTraverser: override def traverse(tree: tpd.Tree)(using Context): Unit = - tree match { + tree match case pkg: tpd.PackageDef => traverseChildren(pkg) case td: tpd.TypeDef => - if (td.symbol.isClass) { + if (td.symbol.isClass) val sym = td.symbol.asClass - tpd.sliceTopLevel(ctx.compilationUnit.tpdTree, sym) match { + tpd.sliceTopLevel(ctx.compilationUnit.tpdTree, sym) match case (pkg: tpd.PackageDef) :: Nil => sym.rootTreeOrProvider = pkg case _ => sym.rootTreeOrProvider = td - } - } case _ => () - } - } -} -object SetRootTree { +object SetRootTree: val name: String = "SetRootTree" val description: String = "set the rootTreeOrProvider on class symbols" -} diff --git a/compiler/src/dotty/tools/dotc/transform/SpecializeApplyMethods.scala b/compiler/src/dotty/tools/dotc/transform/SpecializeApplyMethods.scala index 6ffa05075201..b7385c8f7e6e 100644 --- a/compiler/src/dotty/tools/dotc/transform/SpecializeApplyMethods.scala +++ b/compiler/src/dotty/tools/dotc/transform/SpecializeApplyMethods.scala @@ -17,7 +17,7 @@ import scala.collection.mutable * can hardcode them. This should, however be removed once we're using a * different standard library. 
*/ -class SpecializeApplyMethods extends MiniPhase with InfoTransformer { +class SpecializeApplyMethods extends MiniPhase with InfoTransformer: import ast.tpd._ override def phaseName: String = SpecializeApplyMethods.name @@ -27,36 +27,32 @@ class SpecializeApplyMethods extends MiniPhase with InfoTransformer { override def isEnabled(using Context): Boolean = !ctx.settings.scalajs.value - private def specApplySymbol(sym: Symbol, args: List[Type], ret: Type)(using Context): Symbol = { + private def specApplySymbol(sym: Symbol, args: List[Type], ret: Type)(using Context): Symbol = val name = nme.apply.specializedFunction(ret, args) // Create the symbol at the next phase, so that it is a valid member of the // corresponding function for all valid periods of its SymDenotations. // Otherwise, the valid period will offset by 1, which causes a stale symbol // in compiling stdlib. atNextPhase(newSymbol(sym, name, Flags.Method, MethodType(args, ret))) - } - private inline def specFun0(inline op: Type => Unit)(using Context): Unit = { + private inline def specFun0(inline op: Type => Unit)(using Context): Unit = for (r <- defn.Function0SpecializedReturnTypes) do op(r) - } - private inline def specFun1(inline op: (Type, Type) => Unit)(using Context): Unit = { + private inline def specFun1(inline op: (Type, Type) => Unit)(using Context): Unit = for r <- defn.Function1SpecializedReturnTypes t1 <- defn.Function1SpecializedParamTypes do op(t1, r) - } - private inline def specFun2(inline op: (Type, Type, Type) => Unit)(using Context): Unit = { + private inline def specFun2(inline op: (Type, Type, Type) => Unit)(using Context): Unit = for r <- defn.Function2SpecializedReturnTypes t1 <- defn.Function2SpecializedParamTypes t2 <- defn.Function2SpecializedParamTypes do op(t1, t2, r) - } override def infoMayChange(sym: Symbol)(using Context) = sym == defn.Function0 @@ -64,7 +60,7 @@ class SpecializeApplyMethods extends MiniPhase with InfoTransformer { || sym == defn.Function2 /** Add 
symbols for specialized methods to FunctionN */ - override def transformInfo(tp: Type, sym: Symbol)(using Context) = tp match { + override def transformInfo(tp: Type, sym: Symbol)(using Context) = tp match case tp: ClassInfo => if sym == defn.Function0 then val scope = tp.decls.cloneScope @@ -84,13 +80,12 @@ class SpecializeApplyMethods extends MiniPhase with InfoTransformer { else tp case _ => tp - } /** Create bridge methods for FunctionN with specialized applys */ - override def transformTemplate(tree: Template)(using Context) = { + override def transformTemplate(tree: Template)(using Context) = val cls = tree.symbol.owner.asClass - def synthesizeApply(names: collection.Set[TermName]): Tree = { + def synthesizeApply(names: collection.Set[TermName]): Tree = val applyBuf = new mutable.ListBuffer[DefDef] names.foreach { name => val applySym = cls.info.decls.lookup(name) @@ -106,7 +101,6 @@ class SpecializeApplyMethods extends MiniPhase with InfoTransformer { applyBuf += ddef } cpy.Template(tree)(body = tree.body ++ applyBuf) - } if cls == defn.Function0 then synthesizeApply(defn.Function0SpecializedApplyNames) @@ -116,8 +110,6 @@ class SpecializeApplyMethods extends MiniPhase with InfoTransformer { synthesizeApply(defn.Function2SpecializedApplyNames) else tree - } -} object SpecializeApplyMethods: val name: String = "specializeApplyMethods" diff --git a/compiler/src/dotty/tools/dotc/transform/SpecializeFunctions.scala b/compiler/src/dotty/tools/dotc/transform/SpecializeFunctions.scala index 2248fbc8d570..93ebd85cef00 100644 --- a/compiler/src/dotty/tools/dotc/transform/SpecializeFunctions.scala +++ b/compiler/src/dotty/tools/dotc/transform/SpecializeFunctions.scala @@ -10,7 +10,7 @@ import MegaPhase.MiniPhase /** Specializes classes that inherit from `FunctionN` where there exists a * specialized form. 
*/ -class SpecializeFunctions extends MiniPhase { +class SpecializeFunctions extends MiniPhase: import ast.tpd._ override def phaseName: String = SpecializeFunctions.name @@ -24,7 +24,7 @@ class SpecializeFunctions extends MiniPhase { /** Create forwarders from the generic applys to the specialized ones. */ - override def transformDefDef(ddef: DefDef)(using Context) = { + override def transformDefDef(ddef: DefDef)(using Context) = if ddef.name != nme.apply || ddef.termParamss.length != 1 || ddef.termParamss.head.length > 2 @@ -37,12 +37,11 @@ class SpecializeFunctions extends MiniPhase { var specName: Name | Null = null - def isSpecializable = { + def isSpecializable = val paramTypes = ddef.termParamss.head.map(_.symbol.info) val retType = sym.info.finalResultType specName = nme.apply.specializedFunction(retType, paramTypes) defn.isSpecializableFunction(cls, paramTypes, retType) - } if (sym.is(Flags.Deferred) || !isSpecializable) return ddef @@ -65,11 +64,10 @@ class SpecializeFunctions extends MiniPhase { val rhs = This(cls).select(specializedApply).appliedToTermArgs(args) val ddef1 = cpy.DefDef(ddef)(rhs = rhs) Thicket(ddef1, specializedDecl) - } /** Dispatch to specialized `apply`s in user code when available */ override def transformApply(tree: Apply)(using Context) = - tree match { + tree match case Apply(fun: NameTree, args) if fun.name == nme.apply && args.size <= 3 && fun.symbol.maybeOwner.isType => val argTypes = fun.tpe.widen.firstParamTypes.map(_.widenSingleton.dealias) val retType = tree.tpe.widenSingleton.dealias @@ -101,13 +99,11 @@ class SpecializeFunctions extends MiniPhase { newSel.appliedToTermArgs(args) else tree case _ => tree - } private def derivesFromFn012(cls: ClassSymbol)(using Context): Boolean = cls.baseClasses.exists { p => p == defn.Function0 || p == defn.Function1 || p == defn.Function2 } -} object SpecializeFunctions: val name: String = "specializeFunctions" diff --git a/compiler/src/dotty/tools/dotc/transform/Splicer.scala 
b/compiler/src/dotty/tools/dotc/transform/Splicer.scala index 741c770e2c77..3989c8f447e5 100644 --- a/compiler/src/dotty/tools/dotc/transform/Splicer.scala +++ b/compiler/src/dotty/tools/dotc/transform/Splicer.scala @@ -33,7 +33,7 @@ import scala.quoted.Quotes import scala.quoted.runtime.impl._ /** Utility class to splice quoted expressions */ -object Splicer { +object Splicer: import tpd.* import Interpreter.* @@ -43,7 +43,7 @@ object Splicer { * * See: `Staging` */ - def splice(tree: Tree, splicePos: SrcPos, spliceExpansionPos: SrcPos, classLoader: ClassLoader)(using Context): Tree = tree match { + def splice(tree: Tree, splicePos: SrcPos, spliceExpansionPos: SrcPos, classLoader: ClassLoader)(using Context): Tree = tree match case Quote(quotedTree, Nil) => quotedTree case _ => val macroOwner = newSymbol(ctx.owner, nme.MACROkw, Macro | Synthetic, defn.AnyType, coord = tree.span) @@ -52,7 +52,7 @@ object Splicer { inContext(sliceContext) { val oldContextClassLoader = Thread.currentThread().getContextClassLoader Thread.currentThread().setContextClassLoader(classLoader) - try { + try val interpreter = new SpliceInterpreter(splicePos, classLoader) // Some parts of the macro are evaluated during the unpickling performed in quotedExprToTree @@ -60,11 +60,10 @@ object Splicer { val interpretedTree = interpretedExpr.fold(tree)(macroClosure => PickledQuotes.quotedExprToTree(macroClosure(QuotesImpl()))) checkEscapedVariables(interpretedTree, macroOwner) - } finally { + finally Thread.currentThread().setContextClassLoader(oldContextClassLoader) - } }.changeOwner(macroOwner, ctx.owner) - catch { + catch case ex: CompilationUnit.SuspendException => throw ex case ex: scala.quoted.runtime.StopMacroExpansion => @@ -83,8 +82,6 @@ object Splicer { """ report.error(msg, spliceExpansionPos) ref(defn.Predef_undefined).withType(ErrorType(msg)) - } - } /** Checks that no symbol that was generated within the macro expansion has an out of scope reference */ def checkEscapedVariables(tree: 
Tree, expansionOwner: Symbol)(using Context): tree.type = @@ -92,10 +89,9 @@ object Splicer { private[this] var locals = Set.empty[Symbol] private def markSymbol(sym: Symbol)(using Context): Unit = locals = locals + sym - private def markDef(tree: Tree)(using Context): Unit = tree match { + private def markDef(tree: Tree)(using Context): Unit = tree match case tree: DefTree => markSymbol(tree.symbol) case _ => - } def traverse(tree: Tree)(using Context): Unit = def traverseOver(lastEntered: Set[Symbol]) = try traverseChildren(tree) @@ -135,12 +131,12 @@ object Splicer { * * See: `Staging` */ - def checkValidMacroBody(tree: Tree)(using Context): Unit = tree match { + def checkValidMacroBody(tree: Tree)(using Context): Unit = tree match case Quote(_, Nil) => // ok case _ => type Env = Set[Symbol] - def checkValidStat(tree: Tree)(using Env): Env = tree match { + def checkValidStat(tree: Tree)(using Env): Env = tree match case tree: ValDef if tree.symbol.is(Synthetic) => // Check val from `foo(j = x, i = y)` which it is expanded to // `val j$1 = x; val i$1 = y; foo(i = i$1, j = j$1)` @@ -149,20 +145,18 @@ object Splicer { case _ => report.error("Macro should not have statements", tree.srcPos) summon[Env] - } - def checkIfValidArgument(tree: Tree)(using Env): Unit = tree match { + def checkIfValidArgument(tree: Tree)(using Env): Unit = tree match case Block(Nil, expr) => checkIfValidArgument(expr) case Typed(expr, _) => checkIfValidArgument(expr) case Apply(Select(Quote(body, _), nme.apply), _) => - val noSpliceChecker = new TreeTraverser { + val noSpliceChecker = new TreeTraverser: def traverse(tree: Tree)(using Context): Unit = tree match case Splice(_) => report.error("Quoted argument of macros may not have splices", tree.srcPos) case _ => traverseChildren(tree) - } noSpliceChecker.traverse(body) case Apply(TypeApply(fn, List(quoted)), _)if fn.symbol == defn.QuotedTypeModule_of => @@ -190,9 +184,8 @@ object Splicer { | * Literal values of primitive types | * 
References to `inline val`s |""".stripMargin, tree.srcPos) - } - def checkIfValidStaticCall(tree: Tree)(using Env): Unit = tree match { + def checkIfValidStaticCall(tree: Tree)(using Env): Unit = tree match case closureDef(ddef @ DefDef(_, ValDefs(ev :: Nil) :: Nil, _, _)) if ddef.symbol.info.isContextualMethod => checkIfValidStaticCall(ddef.rhs)(using summon[Env] + ev.symbol) @@ -220,10 +213,8 @@ object Splicer { | |Expected the splice ${...} to contain a single call to a static method. |""".stripMargin, tree.srcPos) - } checkIfValidStaticCall(tree)(using Set.empty) - } /** Is this the dummy owner of a macro expansion */ def isMacroOwner(sym: Symbol)(using Context): Boolean = @@ -236,18 +227,17 @@ object Splicer { /** Tree interpreter that evaluates the tree. * Interpreter is assumed to start at quotation level -1. */ - private class SpliceInterpreter(pos: SrcPos, classLoader: ClassLoader)(using Context) extends Interpreter(pos, classLoader) { + private class SpliceInterpreter(pos: SrcPos, classLoader: ClassLoader)(using Context) extends Interpreter(pos, classLoader): - override protected def interpretTree(tree: Tree)(implicit env: Env): Object = tree match { + override protected def interpretTree(tree: Tree)(implicit env: Env): Object = tree match // Interpret level -1 quoted code `'{...}` (assumed without level 0 splices) case Apply(Select(Quote(body, _), nme.apply), _) => - val body1 = body match { + val body1 = body match case expr: Ident if expr.symbol.isAllOf(InlineByNameProxy) => // inline proxy for by-name parameter expr.symbol.defTree.asInstanceOf[DefDef].rhs case Inlined(EmptyTree, _, body1) => body1 case _ => body - } new ExprImpl(Inlined(EmptyTree, Nil, QuoteUtils.changeOwnerOfTree(body1, ctx.owner)).withSpan(body1.span), SpliceScope.getCurrent) // Interpret level -1 `Type.of[T]` @@ -256,6 +246,3 @@ object Splicer { case _ => super.interpretTree(tree) - } - } -} diff --git a/compiler/src/dotty/tools/dotc/transform/Splicing.scala 
b/compiler/src/dotty/tools/dotc/transform/Splicing.scala index ff5dc5042eaf..fc7cc4efc1c7 100644 --- a/compiler/src/dotty/tools/dotc/transform/Splicing.scala +++ b/compiler/src/dotty/tools/dotc/transform/Splicing.scala @@ -139,15 +139,13 @@ class Splicing extends MacroTransform: } /** Transform trees within annotations */ - private def transformAnnotTrees(using Context) = new TypeMap { - override def apply(tp: Type): Type = { + private def transformAnnotTrees(using Context) = new TypeMap: + override def apply(tp: Type): Type = tp match case tp @ AnnotatedType(underlying, annot) => val underlying1 = this(underlying) derivedAnnotatedType(tp, underlying1, annot.derivedAnnotation(transform(annot.tree))) case _ => mapOver(tp) - } - } end QuoteTransformer @@ -196,10 +194,9 @@ class Splicing extends MacroTransform: case tree: RefTree => if tree.isTerm then if isCaptured(tree.symbol) then - val tpe = tree.tpe.widenTermRefExpr match { + val tpe = tree.tpe.widenTermRefExpr match case tpw: MethodicType => tpw.toFunctionType(isJava = false) case tpw => tpw - } spliced(tpe)(capturedTerm(tree)) else super.transform(tree) else // tree.isType then @@ -230,15 +227,14 @@ class Splicing extends MacroTransform: private def transformLevel0Quote(quote: Quote)(using Context): Tree = // transform and collect new healed types - val (tags, body1) = inContextWithQuoteTypeTags { + val (tags, body1) = inContextWithQuoteTypeTags: transform(quote.body)(using quoteContext) - } cpy.Quote(quote)(body1, quote.tags ::: tags) class ArgsClause(val args: List[Tree]): def isTerm: Boolean = args.isEmpty || args.head.isTerm - private object CapturedApplication { + private object CapturedApplication: /** Matches and application `f(...)` (possibly with several argument clauses) where `f` is captured */ def unapply(tree: Tree)(using Context): Option[(RefTree, List[ArgsClause])] = tree match @@ -248,7 +244,6 @@ class Splicing extends MacroTransform: Some((fn, argss :+ ArgsClause(args))) case _ => None - } 
private def containsCapturedType(tpe: Type)(using Context): Boolean = tpe.existsPart(t => isCaptured(t.typeSymbol) || isCaptured(t.termSymbol), StopAt.Static) @@ -259,14 +254,13 @@ class Splicing extends MacroTransform: * Registers `x` as a captured variable in the hole and creates an `x$1` `Expr` reference to it. */ private def transformSplicedAssign(tree: Assign)(using Context): Tree = - spliced(tree.tpe) { + spliced(tree.tpe): reflect.asExpr(tree.tpe)( reflect.Assign( reflect.asTerm(capturedTerm(tree.lhs)), reflect.asTerm(quoted(transform(tree.rhs))) ) ) - } /** Transform an application `f(a1, a2, ...)` with a captured `f` to * `${ Apply(f$1.asTerm, List('{a1$}.asTerm, '{a2$}.asTerm, ...)).asExpr.asInstanceOf[Expr[T]] }` @@ -276,18 +270,16 @@ class Splicing extends MacroTransform: * It also handles cases with multiple argument clauses using nested `Apply`/`TypeApply`. */ private def transformCapturedApplication(tree: Tree, fn: RefTree, argss: List[ArgsClause])(using Context): Tree = - spliced(tree.tpe) { + spliced(tree.tpe): def TermList(args: List[Tree]): List[Tree] = args.map(arg => reflect.asTerm(quoted(transform(arg)(using spliceContext)))) def TypeTreeList(args: List[Tree]): List[Tree] = args.map(arg => reflect.Inferred(reflect.TypeReprOf(transform(arg)(using spliceContext).tpe))) - reflect.asExpr(tree.tpe) { + reflect.asExpr(tree.tpe): argss.foldLeft[Tree](reflect.asTerm(capturedTerm(fn, defn.AnyType))) { (acc, clause) => if clause.isTerm then reflect.Apply(acc, TermList(clause.args)) else reflect.TypeApply(acc, TypeTreeList(clause.args)) } - } - } private def capturedTerm(tree: Tree)(using Context): Tree = val tpe = tree.tpe.widenTermRefExpr match @@ -320,9 +312,9 @@ class Splicing extends MacroTransform: bindingSym private def capturedPartTypes(quote: Quote)(using Context): Tree = - val (tags, body1) = inContextWithQuoteTypeTags { - val capturePartTypes = new TypeMap { - def apply(tp: Type) = tp match { + val (tags, body1) = inContextWithQuoteTypeTags: + 
val capturePartTypes = new TypeMap: + def apply(tp: Type) = tp match case typeRef: TypeRef if containsCapturedType(typeRef) => val termRef = refBindingMap .getOrElseUpdate(typeRef.symbol, (TypeTree(typeRef), newQuotedTypeClassBinding(typeRef)))._2.termRef @@ -330,10 +322,7 @@ class Splicing extends MacroTransform: tagRef case _ => mapOver(tp) - } - } TypeTree(capturePartTypes(quote.body.tpe.widenTermRefExpr)) - } cpy.Quote(quote)(body1, quote.tags ::: tags) private def getTagRefFor(tree: Tree)(using Context): Tree = @@ -354,9 +343,8 @@ class Splicing extends MacroTransform: val methTpe = ContextualMethodType(List(defn.QuotesClass.typeRef), exprTpe) val meth = newSymbol(ctx.owner, nme.ANON_FUN, Synthetic | Method, methTpe) Closure(meth, argss => { - withCurrentQuote(argss.head.head) { + withCurrentQuote(argss.head.head): body(using ctx.withOwner(meth)).changeOwner(ctx.owner, meth) - } }) Splice(closure, tpe) @@ -364,9 +352,8 @@ class Splicing extends MacroTransform: tpd.Quote(expr, Nil).select(nme.apply).appliedTo(quotes.nn) /** Helper methods to construct trees calling methods in `Quotes.reflect` based on the current `quotes` tree */ - private object reflect extends ReifiedReflect { + private object reflect extends ReifiedReflect: def quotesTree = quotes.nn - } end SpliceTransformer diff --git a/compiler/src/dotty/tools/dotc/transform/Staging.scala b/compiler/src/dotty/tools/dotc/transform/Staging.scala index 43cbe80ce8c4..6252aa51c6fe 100644 --- a/compiler/src/dotty/tools/dotc/transform/Staging.scala +++ b/compiler/src/dotty/tools/dotc/transform/Staging.scala @@ -19,7 +19,7 @@ import dotty.tools.dotc.staging.HealType * * See `CrossStageSafety` */ -class Staging extends MacroTransform { +class Staging extends MacroTransform: import tpd._ override def phaseName: String = Staging.name @@ -31,13 +31,13 @@ class Staging extends MacroTransform { override def allowsImplicitSearch: Boolean = true override def checkPostCondition(tree: Tree)(using Context): Unit = - if 
(ctx.phase <= stagingPhase) { + if (ctx.phase <= stagingPhase) // Recheck that staging level consistency holds but do not heal any inconsistent types as they should already have been heald - tree match { + tree match case PackageDef(pid, _) if tree.symbol.owner == defn.RootClass => - val checker = new CrossStageSafety { + val checker = new CrossStageSafety: override protected def healType(pos: SrcPos)(tpe: Type)(using Context) = new HealType(pos) { - override protected def tryHeal(tp: TypeRef): TypeRef = { + override protected def tryHeal(tp: TypeRef): TypeRef = val sym = tp.symbol def symStr = if (sym.is(ModuleClass)) sym.sourceModule.show @@ -51,42 +51,33 @@ class Staging extends MacroTransform { | - but the access is at level $level.$errMsg""") tp - } }.apply(tpe) - } checker.transform(tree) case _ => - } - } if !Inlines.inInlineMethod then - tree match { + tree match case tree: RefTree => assert(level != 0 || tree.symbol != defn.QuotedTypeModule_of, "scala.quoted.Type.of at level 0 should have been replaced with Quote AST in staging phase") case _ => - } - tree.tpe match { + tree.tpe match case tpe @ TypeRef(prefix, _) if tpe.typeSymbol.isTypeSplice => // Type splices must have a know term ref, usually to an implicit argument // This is mostly intended to catch `quoted.Type[T]#splice` types which should just be `T` assert(prefix.isInstanceOf[TermRef] || prefix.isInstanceOf[ThisType], prefix) case _ => // OK - } end checkPostCondition override def run(using Context): Unit = if (ctx.compilationUnit.needsStaging) super.run - protected def newTransformer(using Context): Transformer = new Transformer { + protected def newTransformer(using Context): Transformer = new Transformer: override def transform(tree: tpd.Tree)(using Context): tpd.Tree = (new CrossStageSafety).transform(tree) - } -} -object Staging { +object Staging: val name: String = "staging" val description: String = "check staging levels and heal staged types" -} diff --git 
a/compiler/src/dotty/tools/dotc/transform/SuperAccessors.scala b/compiler/src/dotty/tools/dotc/transform/SuperAccessors.scala index b78c75d58340..7e2d8ac14fa9 100644 --- a/compiler/src/dotty/tools/dotc/transform/SuperAccessors.scala +++ b/compiler/src/dotty/tools/dotc/transform/SuperAccessors.scala @@ -30,7 +30,7 @@ import NameKinds.{ SuperAccessorName, ExpandPrefixName } * * (4) Super calls do not go to synthetic field accessors */ -class SuperAccessors(thisPhase: DenotTransformer) { +class SuperAccessors(thisPhase: DenotTransformer): import tpd._ @@ -48,12 +48,11 @@ class SuperAccessors(thisPhase: DenotTransformer) { */ private var invalidEnclClass: Symbol = NoSymbol - def withInvalidCurrentClass[A](trans: => A)(using Context): A = { + def withInvalidCurrentClass[A](trans: => A)(using Context): A = val saved = invalidEnclClass invalidEnclClass = ctx.owner.enclosingClass try trans finally invalidEnclClass = saved - } private def validCurrentClass(using Context): Boolean = ctx.owner.enclosingClass != invalidEnclClass @@ -62,7 +61,7 @@ class SuperAccessors(thisPhase: DenotTransformer) { private val accDefs = MutableSymbolMap[mutable.ListBuffer[Tree]]() /** A super accessor call corresponding to `sel` */ - private def superAccessorCall(sel: Select, mixName: Name = nme.EMPTY)(using Context) = { + private def superAccessorCall(sel: Select, mixName: Name = nme.EMPTY)(using Context) = val Select(qual, name) = sel val sym = sel.symbol val clazz = qual.symbol.asClass @@ -78,7 +77,7 @@ class SuperAccessors(thisPhase: DenotTransformer) { val accRange = sel.span.focus val superAcc = clazz.info.decl(superName) .suchThat(_.signature == superInfo.signature).symbol - .orElse { + .orElse: report.debuglog(s"add super acc ${sym.showLocated} to $clazz") val maybeDeferred = if (clazz.is(Trait)) Deferred else EmptyFlags val acc = newSymbol( @@ -92,15 +91,13 @@ class SuperAccessors(thisPhase: DenotTransformer) { sel.srcPos) else accDefs(clazz) += DefDef(acc, 
EmptyTree).withSpan(accRange) acc - } This(clazz).select(superAcc).withSpan(sel.span) - } /** Check selection `super.f` for conforming to rules. If necessary, * replace by a super accessor call. */ - private def transformSuperSelect(sel: Select)(using Context): Tree = { + private def transformSuperSelect(sel: Select)(using Context): Tree = val Select(sup @ Super(_, mix), name) = sel: @unchecked val sym = sel.symbol assert(sup.symbol.exists, s"missing symbol in $sel: ${sup.tpe}") @@ -112,7 +109,7 @@ class SuperAccessors(thisPhase: DenotTransformer) { report.error(em"super may be not be used on ${sym.underlyingSymbol}", sel.srcPos) else if (isDisallowed(sym)) report.error(em"super not allowed here: use this.${sel.name} instead", sel.srcPos) - else if (sym.is(Deferred)) { + else if (sym.is(Deferred)) val member = sym.overridingSymbol(clazz.asClass) if (!mix.name.isEmpty || !member.exists || @@ -121,20 +118,18 @@ class SuperAccessors(thisPhase: DenotTransformer) { em"${sym.showLocated} is accessed from super. It may not be abstract unless it is overridden by a member declared `abstract' and `override'", sel.srcPos) else report.log(i"ok super $sel ${sym.showLocated} $member $clazz ${member.isIncompleteIn(clazz)}") - } - else { + else val owner = sym.owner if (!owner.is(Trait)) if (mix.name.isEmpty) // scala/bug#4989 Check if an intermediate class between `clazz` and `sym.owner` redeclares the method as abstract. 
- for (intermediateClass <- clazz.info.baseClasses.tail.takeWhile(_ != sym.owner)) { + for (intermediateClass <- clazz.info.baseClasses.tail.takeWhile(_ != sym.owner)) val overriding = sym.overridingSymbol(intermediateClass) if (overriding.is(Deferred, butNot = AbsOverride) && !overriding.owner.is(Trait)) report.error( em"${sym.showLocated} cannot be directly accessed from ${clazz} because ${overriding.owner} redeclares it as abstract", sel.srcPos) - } - else { + else // scala/scala-dev#143: // a call `super[T].m` that resolves to `A.m` cannot be translated to correct bytecode if // `A` is a class (not a trait / interface), but not the direct superclass. Invokespecial @@ -150,15 +145,12 @@ class SuperAccessors(thisPhase: DenotTransformer) { em"""Super call cannot be emitted: the selected $sym is declared in $owner, which is not the direct superclass of $clazz. |An unqualified super call (super.${sym.name}) would be allowed.""", sel.srcPos) - } - } val needAccessor = name.isTermName && ( clazz != currentClass || !validCurrentClass || mix.name.isEmpty && clazz.is(Trait)) if (needAccessor) atPhase(thisPhase.next)(superAccessorCall(sel, mix.name)) else sel - } /** Disallow some super.XX calls targeting Any methods which would * otherwise lead to either a compiler crash or runtime failure. @@ -170,14 +162,14 @@ class SuperAccessors(thisPhase: DenotTransformer) { (sym eq defn.Any_##) /** Transform select node, adding super and protected accessors as needed */ - def transformSelect(tree: Tree, targs: List[Tree])(using Context): Tree = { + def transformSelect(tree: Tree, targs: List[Tree])(using Context): Tree = val sel @ Select(qual, name) = tree: @unchecked val sym = sel.symbol def needsSuperAccessor = ProtectedAccessors.needsAccessorIfNotInSubclass(sym) && AccessProxies.hostForAccessorOf(sym).is(Trait) - qual match { + qual match case _: This if needsSuperAccessor => /* Given a protected member m defined in class C, * and a trait T that calls m. 
@@ -202,26 +194,20 @@ class SuperAccessors(thisPhase: DenotTransformer) { transformSuperSelect(sel) case _ => sel - } - } /** Wrap template to template transform `op` with needed initialization and finalization */ - def wrapTemplate(tree: Template)(op: Template => Template)(using Context): Template = { + def wrapTemplate(tree: Template)(op: Template => Template)(using Context): Template = accDefs(currentClass) = new mutable.ListBuffer[Tree] val impl = op(tree) val accessors = accDefs.remove(currentClass).nn if (accessors.isEmpty) impl - else { - val (params, rest) = impl.body span { + else + val (params, rest) = impl.body span: case td: TypeDef => !td.isClassDef case vd: ValOrDefDef => vd.symbol.flags.is(ParamAccessor) case _ => false - } cpy.Template(impl)(body = params ++ accessors ++ rest) - } - } /** Wrap `DefDef` producing operation `op`, potentially setting `invalidClass` info */ def wrapDefDef(ddef: DefDef)(op: => DefDef)(using Context): DefDef = if (isMethodWithExtension(ddef.symbol)) withInvalidCurrentClass(op) else op -} diff --git a/compiler/src/dotty/tools/dotc/transform/SymUtils.scala b/compiler/src/dotty/tools/dotc/transform/SymUtils.scala index d3a17334caf5..16a3b9a51ff6 100644 --- a/compiler/src/dotty/tools/dotc/transform/SymUtils.scala +++ b/compiler/src/dotty/tools/dotc/transform/SymUtils.scala @@ -30,7 +30,7 @@ object SymUtils: * through the superclass. 
Traits are given in the order they appear in the * parents clause (which is the reverse of their order in baseClasses) */ - def directlyInheritedTraits(using Context): List[ClassSymbol] = { + def directlyInheritedTraits(using Context): List[ClassSymbol] = val superCls = self.asClass.superClass val baseClasses = self.asClass.baseClasses if (baseClasses.isEmpty) Nil @@ -39,7 +39,6 @@ object SymUtils: case bc :: bcs1 => if bc eq superCls then acc else recur(bcs1, bc :: acc) case nil => acc recur(baseClasses.tail, Nil) - } /** All traits implemented by a class, except for those inherited through the superclass. * The empty list if `self` is a trait. @@ -170,11 +169,11 @@ object SymUtils: s"it is not a sealed ${self.kindString}" else if (!self.isOneOf(AbstractOrTrait)) "it is not an abstract class" - else { + else val children = self.children val companionMirror = self.useCompanionAsSumMirror val ownerScope = if pre.isInstanceOf[SingletonType] then pre.classSymbol else NoSymbol - def problem(child: Symbol) = { + def problem(child: Symbol) = def accessibleMessage(sym: Symbol): String = def inherits(sym: Symbol, scope: Symbol): Boolean = @@ -196,7 +195,7 @@ object SymUtils: if (child == self) "it has anonymous or inaccessible subclasses" else if (!childAccessible.isEmpty) i"its child $child is not accessible $childAccessible" else if (!child.isClass) "" // its a singleton enum value - else { + else val s = child.whyNotGenericProduct if s.isEmpty then s else if child.is(Sealed) then @@ -205,11 +204,8 @@ object SymUtils: else i"its child $child is not a generic sum because $s" else i"its child $child is not a generic product because $s" - } - } if (children.isEmpty) "it does not have subclasses" else children.map(problem).find(!_.isEmpty).getOrElse("") - } def isGenericSum(pre: Type)(using Context): Boolean = whyNotGenericSum(pre).isEmpty @@ -228,13 +224,12 @@ object SymUtils: else NoSymbol /** Apply symbol/symbol substitution to this symbol */ - def subst(from: 
List[Symbol], to: List[Symbol]): Symbol = { + def subst(from: List[Symbol], to: List[Symbol]): Symbol = @tailrec def loop(from: List[Symbol], to: List[Symbol]): Symbol = if (from.isEmpty) self else if (self eq from.head) to.head else loop(from.tail, to.tail) loop(from, to) - } def accessorNamed(name: TermName)(using Context): Symbol = self.owner.info.decl(name).suchThat(_.is(Accessor)).symbol @@ -253,13 +248,12 @@ object SymUtils: if (self.name.is(TraitSetterName)) self else accessorNamed(Mixin.traitSetterName(self.asTerm)) - def field(using Context): Symbol = { + def field(using Context): Symbol = val thisName = self.name.asTermName val fieldName = if (self.hasAnnotation(defn.ScalaStaticAnnot)) thisName.getterName else thisName.fieldName self.owner.info.decl(fieldName).suchThat(!_.is(Method)).symbol - } /** Is this symbol a constant expression final val? * @@ -282,9 +276,8 @@ object SymUtils: * @pre `self.isConstantExprFinalVal` is true. */ def constExprFinalValConstantType(using Context): ConstantType = - atPhaseNoLater(erasurePhase) { + atPhaseNoLater(erasurePhase): self.info.resultType.asInstanceOf[ConstantType] - } def isField(using Context): Boolean = self.isTerm && !self.is(Method) @@ -292,10 +285,9 @@ object SymUtils: def isEnumCase(using Context): Boolean = self.isAllOf(EnumCase, butNot = JavaDefined) - def withAnnotationsCarrying(from: Symbol, meta: Symbol, orNoneOf: Set[Symbol] = Set.empty)(using Context): self.type = { + def withAnnotationsCarrying(from: Symbol, meta: Symbol, orNoneOf: Set[Symbol] = Set.empty)(using Context): self.type = self.addAnnotations(from.annotationsCarrying(Set(meta), orNoneOf)) self - } def isEnum(using Context): Boolean = self.is(Enum, butNot = JavaDefined) def isEnumClass(using Context): Boolean = isEnum && !self.is(Case) @@ -325,13 +317,12 @@ object SymUtils: self.owner.isTerm /** Is symbol directly or indirectly owned by a term symbol? 
*/ - @tailrec final def isLocal(using Context): Boolean = { + @tailrec final def isLocal(using Context): Boolean = val owner = self.maybeOwner if (!owner.exists) false else if (isLocalToBlock) true else if (owner.is(Package)) false else owner.isLocal - } /** Is symbol a type splice operation? */ def isTypeSplice(using Context): Boolean = diff --git a/compiler/src/dotty/tools/dotc/transform/SyntheticMembers.scala b/compiler/src/dotty/tools/dotc/transform/SyntheticMembers.scala index 0bdf3001bf26..b2c485259f78 100644 --- a/compiler/src/dotty/tools/dotc/transform/SyntheticMembers.scala +++ b/compiler/src/dotty/tools/dotc/transform/SyntheticMembers.scala @@ -17,7 +17,7 @@ import util.Spans.Span import config.Printers.derive import NullOpsDecorator._ -object SyntheticMembers { +object SyntheticMembers: enum MirrorImpl: case OfProduct(pre: Type) @@ -28,7 +28,6 @@ object SyntheticMembers { /** Attachment recording that an anonymous class should extend Mirror.Product */ val ExtendsSumOrProductMirror: Property.StickyKey[MirrorImpl] = new Property.StickyKey -} /** Synthetic method implementations for case classes, case objects, * and value classes. 
@@ -52,7 +51,7 @@ object SyntheticMembers { * def equals(other: Any): Boolean * def hashCode(): Int */ -class SyntheticMembers(thisPhase: DenotTransformer) { +class SyntheticMembers(thisPhase: DenotTransformer): import SyntheticMembers._ import ast.tpd._ @@ -63,13 +62,12 @@ class SyntheticMembers(thisPhase: DenotTransformer) { private var myNonJavaEnumValueSymbols: List[Symbol] = Nil private def initSymbols(using Context) = - if (myValueSymbols.isEmpty) { + if (myValueSymbols.isEmpty) myValueSymbols = List(defn.Any_hashCode, defn.Any_equals) myCaseSymbols = defn.caseClassSynthesized myCaseModuleSymbols = myCaseSymbols.filter(_ ne defn.Any_equals) myEnumValueSymbols = List(defn.Product_productPrefix) myNonJavaEnumValueSymbols = myEnumValueSymbols :+ defn.Any_toString :+ defn.Enum_ordinal - } def valueSymbols(using Context): List[Symbol] = { initSymbols; myValueSymbols } def caseSymbols(using Context): List[Symbol] = { initSymbols; myCaseSymbols } @@ -87,7 +85,7 @@ class SyntheticMembers(thisPhase: DenotTransformer) { /** If this is a case or value class, return the appropriate additional methods, * otherwise return nothing. 
*/ - def caseAndValueMethods(clazz: ClassSymbol)(using Context): List[Tree] = { + def caseAndValueMethods(clazz: ClassSymbol)(using Context): List[Tree] = val clazzType = clazz.appliedRef lazy val accessors = if (isDerivedValueClass(clazz)) clazz.paramAccessors.take(1) // Tail parameters can only be `erased` @@ -109,7 +107,7 @@ class SyntheticMembers(thisPhase: DenotTransformer) { def syntheticDefIfMissing(sym: Symbol): List[Tree] = if (existingDef(sym, clazz).exists) Nil else syntheticDef(sym) :: Nil - def syntheticDef(sym: Symbol): Tree = { + def syntheticDef(sym: Symbol): Tree = val synthetic = sym.copy( owner = clazz, flags = sym.flags &~ Deferred | Synthetic | Override, @@ -151,7 +149,7 @@ class SyntheticMembers(thisPhase: DenotTransformer) { else if (isNonJavaEnumValue) identifierRef else forwardToRuntime(vrefss.head) - def syntheticRHS(vrefss: List[List[Tree]])(using Context): Tree = synthetic.name match { + def syntheticRHS(vrefss: List[List[Tree]])(using Context): Tree = synthetic.name match case nme.hashCode_ if isDerivedValueClass(clazz) => valueHashCodeBody case nme.hashCode_ => chooseHashcode case nme.toString_ => toStringBody(vrefss) @@ -163,10 +161,8 @@ class SyntheticMembers(thisPhase: DenotTransformer) { case nme.productPrefix => ownName case nme.productElement => productElementBody(accessors.length, vrefss.head.head) case nme.productElementName => productElementNameBody(accessors.length, vrefss.head.head) - } report.log(s"adding $synthetic to $clazz at ${ctx.phase}") synthesizeDef(synthetic, syntheticRHS) - } /** The class * @@ -184,7 +180,7 @@ class SyntheticMembers(thisPhase: DenotTransformer) { * } * ``` */ - def productElementBody(arity: Int, index: Tree)(using Context): Tree = { + def productElementBody(arity: Int, index: Tree)(using Context): Tree = // case N => _${N + 1} val cases = 0.until(arity).map { i => val sel = This(clazz).select(nme.selectorName(i), _.info.isParameterless) @@ -192,7 +188,6 @@ class SyntheticMembers(thisPhase: 
DenotTransformer) { } Match(index, (cases :+ generateIOBECase(index)).toList) - } /** The class * @@ -210,30 +205,27 @@ class SyntheticMembers(thisPhase: DenotTransformer) { * } * ``` */ - def productElementNameBody(arity: Int, index: Tree)(using Context): Tree = { + def productElementNameBody(arity: Int, index: Tree)(using Context): Tree = // case N => // name for case arg N val cases = 0.until(arity).map { i => CaseDef(Literal(Constant(i)), EmptyTree, Literal(Constant(accessors(i).name.toString))) } Match(index, (cases :+ generateIOBECase(index)).toList) - } - def generateIOBECase(index: Tree): CaseDef = { + def generateIOBECase(index: Tree): CaseDef = val ioob = defn.IndexOutOfBoundsException.typeRef // Second constructor of ioob that takes a String argument - def filterStringConstructor(s: Symbol): Boolean = s.info match { + def filterStringConstructor(s: Symbol): Boolean = s.info match case m: MethodType if s.isConstructor && m.paramInfos.size == 1 => m.paramInfos.head.stripNull == defn.StringType case _ => false - } val constructor = ioob.typeSymbol.info.decls.find(filterStringConstructor _).asTerm val stringIndex = Apply(Select(index, nme.toString_), Nil) val error = Throw(New(ioob, constructor, List(stringIndex))) // case _ => throw new IndexOutOfBoundsException(i.toString) CaseDef(Underscore(defn.IntType), EmptyTree, error) - } /** The class * @@ -258,7 +250,7 @@ class SyntheticMembers(thisPhase: DenotTransformer) { * `@unchecked` is needed for parametric case classes. 
* */ - def equalsBody(that: Tree)(using Context): Tree = { + def equalsBody(that: Tree)(using Context): Tree = val thatAsClazz = newSymbol(ctx.owner, nme.x_0, SyntheticCase, clazzType, coord = ctx.owner.span) // x$0 def wildcardAscription(tp: Type) = Typed(Underscore(tp), TypeTree(tp)) val pattern = Bind(thatAsClazz, wildcardAscription(AnnotatedType(clazzType, Annotation(defn.UncheckedAnnot, thatAsClazz.span)))) // x$0 @ (_: C @unchecked) @@ -278,11 +270,9 @@ class SyntheticMembers(thisPhase: DenotTransformer) { val defaultCase = CaseDef(Underscore(defn.AnyType), EmptyTree, Literal(Constant(false))) // case _ => false val matchExpr = Match(that, List(matchingCase, defaultCase)) if (isDerivedValueClass(clazz)) matchExpr - else { + else val eqCompare = This(clazz).select(defn.Object_eq).appliedTo(that.cast(defn.ObjectType)) eqCompare or matchExpr - } - } /** The class * @@ -296,10 +286,9 @@ class SyntheticMembers(thisPhase: DenotTransformer) { * def hashCode: Int = x.hashCode() * ``` */ - def valueHashCodeBody(using Context): Tree = { + def valueHashCodeBody(using Context): Tree = assert(accessors.nonEmpty) ref(accessors.head).select(nme.hashCode_).ensureApplied - } /** The class * @@ -353,7 +342,7 @@ class SyntheticMembers(thisPhase: DenotTransformer) { * } * ``` */ - def caseHashCodeBody(using Context): Tree = { + def caseHashCodeBody(using Context): Tree = val acc = newSymbol(ctx.owner, nme.acc, Mutable | Synthetic, defn.IntType, coord = ctx.owner.span) val accDef = ValDef(acc, Literal(Constant(0xcafebabe))) val mixPrefix = Assign(ref(acc), @@ -362,11 +351,10 @@ class SyntheticMembers(thisPhase: DenotTransformer) { Assign(ref(acc), ref(defn.staticsMethod("mix")).appliedTo(ref(acc), hashImpl(accessor))) val finish = ref(defn.staticsMethod("finalizeHash")).appliedTo(ref(acc), Literal(Constant(accessors.size))) Block(accDef :: mixPrefix :: mixes, finish) - } /** The `hashCode` implementation for given symbol `sym`. 
*/ def hashImpl(sym: Symbol)(using Context): Tree = - defn.scalaClassName(sym.info.finalResultType) match { + defn.scalaClassName(sym.info.finalResultType) match case tpnme.Unit | tpnme.Null => Literal(Constant(0)) case tpnme.Boolean => If(ref(sym), Literal(Constant(1231)), Literal(Constant(1237))) case tpnme.Int => ref(sym) @@ -375,7 +363,6 @@ class SyntheticMembers(thisPhase: DenotTransformer) { case tpnme.Double => ref(defn.staticsMethod("doubleHash")).appliedTo(ref(sym)) case tpnme.Float => ref(defn.staticsMethod("floatHash")).appliedTo(ref(sym)) case _ => ref(defn.staticsMethod("anyHash")).appliedTo(ref(sym)) - } /** The class * @@ -394,7 +381,6 @@ class SyntheticMembers(thisPhase: DenotTransformer) { def canEqualBody(that: Tree, span: Span): Tree = that.isInstance(AnnotatedType(clazzType, Annotation(defn.UncheckedAnnot, span))) symbolsToSynthesize.flatMap(syntheticDefIfMissing) - } private def hasWriteReplace(clazz: ClassSymbol)(using Context): Boolean = clazz.membersNamed(nme.writeReplace) @@ -451,7 +437,7 @@ class SyntheticMembers(thisPhase: DenotTransformer) { * * unless an implementation already exists, otherwise do nothing. 
*/ - def serializableEnumValueMethod(clazz: ClassSymbol)(using Context): List[Tree] = + def serializableEnumValueMethod(clazz: ClassSymbol)(using Context): List[Tree] = if clazz.isEnumValueImplementation && !clazz.derivesFrom(defn.JavaEnumClass) && clazz.isSerializable @@ -504,9 +490,8 @@ class SyntheticMembers(thisPhase: DenotTransformer) { val (tl1, tpts) = constrained(tl, untpd.EmptyTree, alwaysAddTypeVars = true) val targs = for (tpt <- tpts) yield - tpt.tpe match { + tpt.tpe match case tvar: TypeVar => tvar.instantiate(fromBelow = false) - } (baseRef.appliedTo(targs), extractParams(tl.instantiate(targs))) case methTpe => (baseRef, extractParams(methTpe)) @@ -571,57 +556,49 @@ class SyntheticMembers(thisPhase: DenotTransformer) { * On this case the represented class or object is referred to in a pre-existing `MirroredMonoType` * member of the template. */ - def addMirrorSupport(impl: Template)(using Context): Template = { + def addMirrorSupport(impl: Template)(using Context): Template = val clazz = ctx.owner.asClass var newBody = impl.body var newParents = impl.parents - def addParent(parent: Type): Unit = { + def addParent(parent: Type): Unit = newParents = newParents :+ TypeTree(parent) val oldClassInfo = clazz.classInfo val newClassInfo = oldClassInfo.derivedClassInfo( declaredParents = oldClassInfo.declaredParents :+ parent) clazz.copySymDenotation(info = newClassInfo).installAfter(thisPhase) - } - def addMethod(name: TermName, info: Type, cls: Symbol, body: (Symbol, Tree) => Context ?=> Tree): Unit = { + def addMethod(name: TermName, info: Type, cls: Symbol, body: (Symbol, Tree) => Context ?=> Tree): Unit = val meth = newSymbol(clazz, name, Synthetic | Method, info, coord = clazz.coord) - if (!existingDef(meth, clazz).exists) { + if (!existingDef(meth, clazz).exists) meth.enteredAfter(thisPhase) newBody = newBody :+ synthesizeDef(meth, vrefss => body(cls, vrefss.head.head)) - } - } val linked = clazz.linkedClass - lazy val monoType = { + lazy val monoType 
= val existing = clazz.info.member(tpnme.MirroredMonoType).symbol if (existing.exists && !existing.is(Deferred)) existing - else { + else val monoType = newSymbol(clazz, tpnme.MirroredMonoType, Synthetic, TypeAlias(linked.reachableRawTypeRef), coord = clazz.coord) newBody = newBody :+ TypeDef(monoType).withSpan(ctx.owner.span.focus) monoType.enteredAfter(thisPhase) - } - } def makeSingletonMirror() = addParent(defn.Mirror_SingletonClass.typeRef) - def makeProductMirror(cls: Symbol, optInfo: Option[MirrorImpl.OfProduct]) = { + def makeProductMirror(cls: Symbol, optInfo: Option[MirrorImpl.OfProduct]) = addParent(defn.Mirror_ProductClass.typeRef) addMethod(nme.fromProduct, MethodType(defn.ProductClass.typeRef :: Nil, monoType.typeRef), cls, fromProductBody(_, _, optInfo).ensureConforms(monoType.typeRef)) // t4758.scala or i3381.scala are examples where a cast is needed - } - def makeSumMirror(cls: Symbol, optInfo: Option[MirrorImpl.OfSum]) = { + def makeSumMirror(cls: Symbol, optInfo: Option[MirrorImpl.OfSum]) = addParent(defn.Mirror_SumClass.typeRef) addMethod(nme.ordinal, MethodType(monoType.typeRef :: Nil, defn.IntType), cls, ordinalBody(_, _, optInfo)) - } - if (clazz.is(Module)) { + if (clazz.is(Module)) if (clazz.is(Case)) makeSingletonMirror() else if (linked.isGenericProduct) makeProductMirror(linked, None) else if (linked.isGenericSum(NoType)) makeSumMirror(linked, None) else if (linked.is(Sealed)) derive.println(i"$linked is not a sum because ${linked.whyNotGenericSum(NoType)}") - } else if (impl.removeAttachment(ExtendsSingletonMirror).isDefined) makeSingletonMirror() else @@ -633,16 +610,14 @@ class SyntheticMembers(thisPhase: DenotTransformer) { case _ => cpy.Template(impl)(parents = newParents, body = newBody) - } - def addSyntheticMembers(impl: Template)(using Context): Template = { + def addSyntheticMembers(impl: Template)(using Context): Template = val clazz = ctx.owner.asClass val syntheticMembers = serializableObjectMethod(clazz) ::: 
serializableEnumValueMethod(clazz) ::: caseAndValueMethods(clazz) checkInlining(syntheticMembers) val impl1 = cpy.Template(impl)(body = syntheticMembers ::: impl.body) if ctx.settings.Yscala2Stdlib.value then impl1 else addMirrorSupport(impl1) - } private def checkInlining(syntheticMembers: List[Tree])(using Context): Unit = if syntheticMembers.exists(_.existsSubTree { @@ -650,4 +625,3 @@ class SyntheticMembers(thisPhase: DenotTransformer) { case tree: Select => tree.symbol.isAllOf(InlineMethod) case _ => false }) then ctx.compilationUnit.needsInlining = true -} diff --git a/compiler/src/dotty/tools/dotc/transform/TailRec.scala b/compiler/src/dotty/tools/dotc/transform/TailRec.scala index 741b9d1627fe..cb1dec64262e 100644 --- a/compiler/src/dotty/tools/dotc/transform/TailRec.scala +++ b/compiler/src/dotty/tools/dotc/transform/TailRec.scala @@ -104,7 +104,7 @@ import dotty.tools.uncheckedNN * ported and heavily modified for dotty by Dmitry Petrashko * moved after erasure and adapted to emit `Labeled` blocks by Sébastien Doeraene */ -class TailRec extends MiniPhase { +class TailRec extends MiniPhase: import tpd._ override def phaseName: String = TailRec.name @@ -113,10 +113,10 @@ class TailRec extends MiniPhase { override def runsAfter: Set[String] = Set(Erasure.name) // tailrec assumes erased types - override def transformDefDef(tree: DefDef)(using Context): Tree = { + override def transformDefDef(tree: DefDef)(using Context): Tree = val method = tree.symbol val mandatory = method.hasAnnotation(defn.TailrecAnnot) - def noTailTransform(failureReported: Boolean) = { + def noTailTransform(failureReported: Boolean) = // FIXME: want to report this error on `tree.nameSpan`, but // because of extension method getting a weird position, it is // better to report on method symbol so there's no overlap. 
@@ -126,12 +126,11 @@ class TailRec extends MiniPhase { report.error(TailrecNotApplicable(method), method.srcPos) tree - } val isCandidate = method.isEffectivelyFinal && !(method.is(Accessor) || tree.rhs.eq(EmptyTree)) - if (isCandidate) { + if (isCandidate) val enclosingClass = method.enclosingClass.asClass // Note: this can be split in two separate transforms(in different groups), @@ -141,22 +140,19 @@ class TailRec extends MiniPhase { val transformer = new TailRecElimination(method, enclosingClass, tree.termParamss.head.map(_.symbol), mandatory) val rhsSemiTransformed = transformer.transform(tree.rhs) - if (transformer.rewrote) { + if (transformer.rewrote) val varForRewrittenThis = transformer.varForRewrittenThis val rewrittenParamSyms = transformer.rewrittenParamSyms val varsForRewrittenParamSyms = transformer.varsForRewrittenParamSyms - val initialVarDefs = { - val initialParamVarDefs = rewrittenParamSyms.lazyZip(varsForRewrittenParamSyms).map { + val initialVarDefs = + val initialParamVarDefs = rewrittenParamSyms.lazyZip(varsForRewrittenParamSyms).map: (param, local) => ValDef(local.asTerm, ref(param)) - } - varForRewrittenThis match { + varForRewrittenThis match case Some(local) => ValDef(local.asTerm, This(enclosingClass)) :: initialParamVarDefs case none => initialParamVarDefs - } - } - val rhsFullyTransformed = varForRewrittenThis match { + val rhsFullyTransformed = varForRewrittenThis match case Some(localThisSym) => val thisRef = localThisSym.termRef val substitute = new TreeTypeMap( @@ -183,7 +179,6 @@ class TailRec extends MiniPhase { new TreeTypeMap( typeMap = _.subst(rewrittenParamSyms, varsForRewrittenParamSyms.map(_.termRef)) ).transform(rhsSemiTransformed) - } /** Is the RHS a direct recursive tailcall, possibly with swapped arguments or modified pure arguments. * ``` @@ -191,21 +186,18 @@ class TailRec extends MiniPhase { * ``` * where `` are pure arguments or references to parameters in ``. 
*/ - def isInfiniteRecCall(tree: Tree): Boolean = { - def tailArgOrPureExpr(stat: Tree): Boolean = stat match { + def isInfiniteRecCall(tree: Tree): Boolean = + def tailArgOrPureExpr(stat: Tree): Boolean = stat match case stat: ValDef if stat.name.is(TailTempName) || !stat.symbol.is(Mutable) => tailArgOrPureExpr(stat.rhs) case Assign(lhs: Ident, rhs) if lhs.symbol.name.is(TailLocalName) => tailArgOrPureExpr(rhs) case Assign(lhs: Ident, rhs: Ident) => lhs.symbol == rhs.symbol case stat: Ident if stat.symbol.name.is(TailLocalName) => true case _ => tpd.isPureExpr(stat) - } - tree match { + tree match case Typed(expr, _) => isInfiniteRecCall(expr) case Return(Literal(Constant(())), label) => label.symbol == transformer.continueLabel case Block(stats, expr) => stats.forall(tailArgOrPureExpr) && isInfiniteRecCall(expr) case _ => false - } - } if isInfiniteRecCall(rhsFullyTransformed) then report.warning("Infinite recursive call", tree.srcPos) @@ -220,24 +212,20 @@ class TailRec extends MiniPhase { }) ) ) - } else noTailTransform(failureReported = transformer.failureReported) - } else noTailTransform(failureReported = false) - } - class TailRecElimination(method: Symbol, enclosingClass: ClassSymbol, paramSyms: List[Symbol], isMandatory: Boolean) extends TreeMap { + class TailRecElimination(method: Symbol, enclosingClass: ClassSymbol, paramSyms: List[Symbol], isMandatory: Boolean) extends TreeMap: var rewrote: Boolean = false var failureReported: Boolean = false /** The `tailLabelN` label symbol, used to encode a `continue` from the infinite `while` loop. */ private var myContinueLabel: Symbol | Null = _ - def continueLabel(using Context): Symbol = { + def continueLabel(using Context): Symbol = if (myContinueLabel == null) myContinueLabel = newSymbol(method, TailLabelName.fresh(), Label, defn.UnitType) myContinueLabel.uncheckedNN - } /** The local `var` that replaces `this`, if it is modified in at least one recursive call. 
*/ var varForRewrittenThis: Option[Symbol] = None @@ -247,7 +235,7 @@ class TailRec extends MiniPhase { var varsForRewrittenParamSyms: List[Symbol] = Nil private def getVarForRewrittenThis()(using Context): Symbol = - varForRewrittenThis match { + varForRewrittenThis match case Some(sym) => sym case none => val tpe = @@ -256,17 +244,15 @@ class TailRec extends MiniPhase { val sym = newSymbol(method, TailLocalName.fresh(nme.SELF), Synthetic | Mutable, tpe) varForRewrittenThis = Some(sym) sym - } private def getVarForRewrittenParam(param: Symbol)(using Context): Symbol = - rewrittenParamSyms.indexOf(param) match { + rewrittenParamSyms.indexOf(param) match case -1 => val sym = newSymbol(method, TailLocalName.fresh(param.name.toTermName), Synthetic | Mutable, param.info) rewrittenParamSyms ::= param varsForRewrittenParamSyms ::= sym sym case index => varsForRewrittenParamSyms(index) - } /** Symbols of Labeled blocks that are in tail position. */ private var tailPositionLabeledSyms = LinearSet.empty[Symbol] @@ -276,12 +262,11 @@ class TailRec extends MiniPhase { /** Rewrite this tree to contain no tail recursive calls */ def transform(tree: Tree, tailPosition: Boolean)(using Context): Tree = if (inTailPosition == tailPosition) transform(tree) - else { + else val saved = inTailPosition inTailPosition = tailPosition try transform(tree) finally inTailPosition = saved - } def yesTailTransform(tree: Tree)(using Context): Tree = transform(tree, tailPosition = true) @@ -292,30 +277,27 @@ class TailRec extends MiniPhase { def noTailTransforms[Tr <: Tree](trees: List[Tr])(using Context): List[Tr] = trees.mapConserve(noTailTransform).asInstanceOf[List[Tr]] - override def transform(tree: Tree)(using Context): Tree = { + override def transform(tree: Tree)(using Context): Tree = /* Rewrite an Apply to be considered for tail call transformation. 
*/ - def rewriteApply(tree: Apply): Tree = { + def rewriteApply(tree: Apply): Tree = val arguments = noTailTransforms(tree.args) def continue = cpy.Apply(tree)(noTailTransform(tree.fun), arguments) - def fail(reason: String) = { - if (isMandatory) { + def fail(reason: String) = + if (isMandatory) failureReported = true report.error(em"Cannot rewrite recursive call: $reason", tree.srcPos) - } else tailrec.println("Cannot rewrite recursive call at: " + tree.span + " because: " + reason) continue - } val calledMethod = tree.fun.symbol - val prefix = tree.fun match { + val prefix = tree.fun match case Select(qual, _) => qual case x: Ident if x.symbol eq method => EmptyTree case x => x - } val isRecursiveCall = calledMethod eq method def isRecursiveSuperCall = (method.name eq calledMethod.name) && @@ -323,7 +305,7 @@ class TailRec extends MiniPhase { enclosingClass.appliedRef.widen <:< prefix.tpe.widenDealias if (isRecursiveCall) - if (inTailPosition) { + if (inTailPosition) tailrec.println("Rewriting tail recursive call: " + tree.span) rewrote = true @@ -346,7 +328,7 @@ class TailRec extends MiniPhase { case _ => (getVarForRewrittenThis(), noTailTransform(prefix)) :: assignParamPairs - val assignments = assignThisAndParamPairs match { + val assignments = assignThisAndParamPairs match case (lhs, rhs) :: Nil => Assign(ref(lhs), rhs) :: Nil case _ :: _ => @@ -357,7 +339,6 @@ class TailRec extends MiniPhase { tempValDefs ::: assigns case nil => Nil - } /* The `Typed` node is necessary to perfectly preserve the type of the node. 
* Without it, lubbing in enclosing if/else or match can infer a different type, @@ -365,15 +346,13 @@ class TailRec extends MiniPhase { */ val tpt = TypeTree(method.info.resultType) seq(assignments, Typed(Return(unitLiteral.withSpan(tree.span), continueLabel), tpt)) - } else fail("it is not in tail position") else if (isRecursiveSuperCall) fail("it targets a supertype") else continue - } - tree match { + tree match case tree @ Apply(fun, args) => val meth = fun.symbol if (meth == defn.Boolean_|| || meth == defn.Boolean_&&) @@ -449,12 +428,7 @@ class TailRec extends MiniPhase { case _ => super.transform(tree) - } - } - } -} -object TailRec { +object TailRec: val name: String = "tailrec" val description: String = "rewrite tail recursion to loops" -} diff --git a/compiler/src/dotty/tools/dotc/transform/TransformWildcards.scala b/compiler/src/dotty/tools/dotc/transform/TransformWildcards.scala index ffed65f7676e..1328ef55f910 100644 --- a/compiler/src/dotty/tools/dotc/transform/TransformWildcards.scala +++ b/compiler/src/dotty/tools/dotc/transform/TransformWildcards.scala @@ -11,7 +11,7 @@ import ast.tpd * `val x : T = _` to `val x : T = ` * */ -class TransformWildcards extends MiniPhase with IdentityDenotTransformer { +class TransformWildcards extends MiniPhase with IdentityDenotTransformer: import tpd._ override def phaseName: String = TransformWildcards.name @@ -19,15 +19,13 @@ class TransformWildcards extends MiniPhase with IdentityDenotTransformer { override def description: String = TransformWildcards.description override def checkPostCondition(tree: Tree)(using Context): Unit = - tree match { + tree match case vDef: ValDef => assert(!tpd.isWildcardArg(vDef.rhs)) case _ => - } override def transformValDef(tree: ValDef)(using Context): Tree = if (ctx.owner.isClass) tree else cpy.ValDef(tree)(rhs = tree.rhs.wildcardToDefault) -} object TransformWildcards: val name: String = "transformWildcards" diff --git a/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala 
b/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala index fcb20ea920e9..4131d3f9fcd2 100644 --- a/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala +++ b/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala @@ -37,7 +37,7 @@ import scala.util.control.NonFatal * - After typer, identifiers and select nodes refer to terms only (all types should be * represented as TypeTrees then). */ -class TreeChecker extends Phase with SymTransformer { +class TreeChecker extends Phase with SymTransformer: import ast.tpd._ import TreeChecker._ @@ -46,34 +46,30 @@ class TreeChecker extends Phase with SymTransformer { val NoSuperClassFlags: FlagSet = Trait | Package - def testDuplicate(sym: Symbol, registry: mutable.Map[String, Symbol], typ: String)(using Context): Unit = { + def testDuplicate(sym: Symbol, registry: mutable.Map[String, Symbol], typ: String)(using Context): Unit = val name = sym.javaClassName val isDuplicate = this.flatClasses && registry.contains(name) assert(!isDuplicate, s"$typ defined twice $sym ${sym.id} ${registry(name).id}") registry(name) = sym - } - def checkCompanion(symd: SymDenotation)(using Context): Unit = { + def checkCompanion(symd: SymDenotation)(using Context): Unit = val cur = symd.linkedClass - val prev = atPhase(ctx.phase.prev) { + val prev = atPhase(ctx.phase.prev): symd.symbol.linkedClass - } if (prev.exists) assert(cur.exists || prev.is(ConstructorProxy), i"companion disappeared from $symd") - } - def transformSym(symd: SymDenotation)(using Context): SymDenotation = { + def transformSym(symd: SymDenotation)(using Context): SymDenotation = val sym = symd.symbol - if (sym.isClass && !sym.isAbsent()) { + if (sym.isClass && !sym.isAbsent()) val validSuperclass = sym.isPrimitiveValueClass || defn.syntheticCoreClasses.contains(sym) || (sym eq defn.ObjectClass) || sym.isOneOf(NoSuperClassFlags) || (sym.asClass.superClass.exists) || sym.isRefinementClass assert(validSuperclass, i"$sym has no superclass set") testDuplicate(sym, 
seenClasses, "class") - } val badDeferredAndPrivate = sym.is(Method) && sym.is(Deferred) && sym.is(Private) @@ -86,7 +82,7 @@ class TreeChecker extends Phase with SymTransformer { // Signatures are used to disambiguate overloads and need to stay stable // until erasure, see the comment above `Compiler#phases`. - if (ctx.phaseId <= erasurePhase.id) { + if (ctx.phaseId <= erasurePhase.id) val initial = symd.initial assert(symd == initial || symd.signature == initial.signature, i"""Signature of ${sym} in ${sym.ownersIterator.toList}%, % changed at phase ${ctx.phase.prevMega} @@ -94,10 +90,8 @@ class TreeChecker extends Phase with SymTransformer { |Initial sig : ${initial.signature} |Current info: ${symd.info} |Current sig : ${symd.signature}""") - } symd - } def phaseName: String = "Ycheck" @@ -107,41 +101,36 @@ class TreeChecker extends Phase with SymTransformer { else if (ctx.phase.prev.isCheckable) check(ctx.base.allPhases.toIndexedSeq, ctx) - def check(phasesToRun: Seq[Phase], ctx: Context): Tree = { + def check(phasesToRun: Seq[Phase], ctx: Context): Tree = val fusedPhase = ctx.phase.prevMega(using ctx) report.echo(s"checking ${ctx.compilationUnit} after phase ${fusedPhase}")(using ctx) - inContext(ctx) { + inContext(ctx): assert(ctx.typerState.constraint.domainLambdas.isEmpty, i"non-empty constraint at end of $fusedPhase: ${ctx.typerState.constraint}, ownedVars = ${ctx.typerState.ownedVars.toList}%, %") assertSelectWrapsNew(ctx.compilationUnit.tpdTree) TreeNodeChecker.traverse(ctx.compilationUnit.tpdTree) - } val checkingCtx = ctx .fresh .setReporter(new ThrowingReporter(ctx.reporter)) - val checker = inContext(ctx) { + val checker = inContext(ctx): new Checker(previousPhases(phasesToRun.toList)) - } try checker.typedExpr(ctx.compilationUnit.tpdTree)(using checkingCtx) - catch { + catch case NonFatal(ex) => //TODO CHECK. 
Check that we are bootstrapped - inContext(checkingCtx) { + inContext(checkingCtx): println(i"*** error while checking ${ctx.compilationUnit} after phase ${ctx.phase.prevMega(using ctx)} ***") - } throw ex - } - } /** * Checks that `New` nodes are always wrapped inside `Select` nodes. */ def assertSelectWrapsNew(tree: Tree)(using Context): Unit = (new TreeAccumulator[tpd.Tree] { - override def apply(parent: Tree, tree: Tree)(using Context): Tree = { - tree match { + override def apply(parent: Tree, tree: Tree)(using Context): Tree = + tree match case tree: New if !parent.isInstanceOf[tpd.Select] => assert(assertion = false, i"`New` node must be wrapped in a `Select` of the constructor:\n parent = ${parent.show}\n child = ${tree.show}") case _: Annotated => @@ -149,20 +138,17 @@ class TreeChecker extends Phase with SymTransformer { // somewhat invalid trees. case _ => foldOver(tree, tree) // replace the parent when folding over the children - } parent // return the old parent so that my siblings see it - } })(tpd.EmptyTree, tree) -} -object TreeChecker { +object TreeChecker: /** - Check that TypeParamRefs and MethodParams refer to an enclosing type. * - Check that all type variables are instantiated. */ def checkNoOrphans(tp0: Type, tree: untpd.Tree = untpd.EmptyTree)(using Context): Type = new TypeMap() { val definedBinders = new java.util.IdentityHashMap[Type, Any] - def apply(tp: Type): Type = { - tp match { + def apply(tp: Type): Type = + tp match case tp: BindingType => definedBinders.put(tp, tp) mapOver(tp) @@ -174,9 +160,7 @@ object TreeChecker { apply(tp.underlying) case _ => mapOver(tp) - } tp - } }.apply(tp0) /** Run some additional checks on the nodes of the trees. 
Specifically: @@ -205,7 +189,7 @@ object TreeChecker { private[TreeChecker] def isValidJVMMethodName(name: Name): Boolean = name.toString.forall(isValidJVMMethodChar) - class Checker(phasesToCheck: Seq[Phase]) extends ReTyper with Checking { + class Checker(phasesToCheck: Seq[Phase]) extends ReTyper with Checking: import ast.tpd._ protected val nowDefinedSyms = util.HashSet[Symbol]() @@ -215,45 +199,40 @@ object TreeChecker { // don't check value classes after typer, as the constraint about constructors doesn't hold after transform override def checkDerivedValueClass(clazz: Symbol, stats: List[Tree])(using Context): Unit = () - def withDefinedSyms[T](trees: List[untpd.Tree])(op: => T)(using Context): T = { + def withDefinedSyms[T](trees: List[untpd.Tree])(op: => T)(using Context): T = var locally = List.empty[Symbol] - for (tree <- trees) { + for (tree <- trees) val sym = tree.symbol - tree match { + tree match case tree: untpd.DefTree => assert(isValidJVMName(sym.name.encode), s"${sym.name.debugString} name is invalid on jvm") - everDefinedSyms.get(sym) match { + everDefinedSyms.get(sym) match case Some(t) => if (t ne tree) report.warning(i"symbol ${sym.fullName} is defined at least twice in different parts of AST") // should become an error case None => everDefinedSyms(sym) = tree - } assert(!nowDefinedSyms.contains(sym), i"doubly defined symbol: ${sym.fullName} in $tree") if (ctx.settings.YcheckMods.value) - tree match { + tree match case t: untpd.MemberDef => if (t.name ne sym.name) report.warning(s"symbol ${sym.fullName} name doesn't correspond to AST: ${t}") // todo: compare trees inside annotations case _ => - } locally = sym :: locally nowDefinedSyms += sym case _ => - } - } val res = op nowDefinedSyms --= locally res - } /** The following invariant holds: * * patBoundSyms.contains(sym) <=> sym.isPatternBound */ - def withPatSyms[T](syms: List[Symbol])(op: => T)(using Context): T = { + def withPatSyms[T](syms: List[Symbol])(op: => T)(using Context): T = 
syms.foreach { sym => assert( sym.isPatternBound, @@ -265,20 +244,18 @@ object TreeChecker { val res = op patBoundSyms --= syms res - } // used to check invariant of lambda encoding var nestingBlock: untpd.Block | Null = null - private def withBlock[T](block: untpd.Block)(op: => T): T = { + private def withBlock[T](block: untpd.Block)(op: => T): T = val outerBlock = nestingBlock nestingBlock = block val res = op nestingBlock = outerBlock res - } def assertDefined(tree: untpd.Tree)(using Context): Unit = - if (tree.symbol.maybeOwner.isTerm) { + if (tree.symbol.maybeOwner.isTerm) val sym = tree.symbol assert( nowDefinedSyms.contains(sym) || patBoundSyms.contains(sym), @@ -290,17 +267,15 @@ object TreeChecker { !sym.isPatternBound || patBoundSyms.contains(sym), i"sym.isPatternBound => patBoundSyms.contains(sym) is broken, sym = $sym, line " + tree.srcPos.line ) - } /** assert Java classes are not used as objects */ - def assertIdentNotJavaClass(tree: Tree)(using Context): Unit = tree match { + def assertIdentNotJavaClass(tree: Tree)(using Context): Unit = tree match case _ : untpd.Ident => assert(!tree.symbol.isAllOf(JavaModule), "Java class can't be used as value: " + tree) case _ => - } /** check Java classes are not used as objects */ - def checkIdentNotJavaClass(tree: Tree)(using Context): Unit = tree match { + def checkIdentNotJavaClass(tree: Tree)(using Context): Unit = tree match // case tree: untpd.Ident => // case tree: untpd.Select => // case tree: untpd.Bind => @@ -351,7 +326,6 @@ object TreeChecker { case Annotated(arg, _) => assertIdentNotJavaClass(arg) case _ => - } /** Exclude from double definition checks any erased symbols that were * made `private` in phase `UnlinkErasedDecls`. 
These symbols will be removed @@ -376,7 +350,7 @@ object TreeChecker { assert(false, s"The type of a non-Super tree must not be a SuperType, but $tree has type $tp") case _ => - override def typed(tree: untpd.Tree, pt: Type = WildcardType)(using Context): Tree = { + override def typed(tree: untpd.Tree, pt: Type = WildcardType)(using Context): Tree = val tpdTree = super.typed(tree, pt) Typer.assertPositioned(tree) checkSuper(tpdTree) @@ -385,10 +359,9 @@ object TreeChecker { // Erasure (because running it in Typer would force too much) checkIdentNotJavaClass(tpdTree) tpdTree - } - override def typedUnadapted(tree: untpd.Tree, pt: Type, locked: TypeVars)(using Context): Tree = { - val res = tree match { + override def typedUnadapted(tree: untpd.Tree, pt: Type, locked: TypeVars)(using Context): Tree = + val res = tree match case _: untpd.TypedSplice | _: untpd.Thicket | _: EmptyValDef[?] => super.typedUnadapted(tree, pt, locked) case _ if tree.isType => @@ -409,39 +382,34 @@ object TreeChecker { if (tree.hasType) // it might not be typed because Typer sometimes constructs new untyped trees and resubmits them to typedUnadapted assert(isSubType(tree1.tpe, tree.typeOpt), divergenceMsg(tree1.tpe, tree.typeOpt)) tree1 - } checkNoOrphans(res.tpe) phasesToCheck.foreach(_.checkPostCondition(res)) res - } - def checkNotRepeated(tree: Tree)(using Context): tree.type = { + def checkNotRepeated(tree: Tree)(using Context): tree.type = def allowedRepeated = tree.tpe.widen.isRepeatedParam assert(!tree.tpe.widen.isRepeatedParam || allowedRepeated, i"repeated parameter type not allowed here: $tree") tree - } /** Check that all methods have MethodicType */ - def isMethodType(pt: Type)(using Context): Boolean = pt match { + def isMethodType(pt: Type)(using Context): Boolean = pt match case at: AnnotatedType => isMethodType(at.parent) case _: MethodicType => true // MethodType, ExprType, PolyType case _ => false - } - override def typedIdent(tree: untpd.Ident, pt: Type)(using Context): 
Tree = { + override def typedIdent(tree: untpd.Ident, pt: Type)(using Context): Tree = assert(tree.isTerm || !ctx.isAfterTyper, tree.show + " at " + ctx.phase) assert(tree.isType || ctx.mode.is(Mode.Pattern) && untpd.isWildcardArg(tree) || !needsSelect(tree.typeOpt), i"bad type ${tree.tpe} for $tree # ${tree.uniqueId}") assertDefined(tree) checkNotRepeated(super.typedIdent(tree, pt)) - } /** Makes sure the symbol in the tree can be approximately reconstructed by * calling `member` on the qualifier type. * Approximately means: The two symbols might be different but one still overrides the other. */ - override def typedSelect(tree: untpd.Select, pt: Type)(using Context): Tree = { + override def typedSelect(tree: untpd.Select, pt: Type)(using Context): Tree = assert(tree.isTerm || !ctx.isAfterTyper, tree.show + " at " + ctx.phase) val tpe = tree.typeOpt @@ -458,13 +426,12 @@ object TreeChecker { assert(denot.symbol.exists, i"Denotation $denot of selection $tree with type $tpe does not have a symbol, qualifier type = ${tree.qualifier.typeOpt}") val sym = tree.symbol - val symIsFixed = tpe match { + val symIsFixed = tpe match case tpe: TermRef => ctx.erasedTypes || !tpe.isPrefixDependentMemberRef case _ => false - } if (sym.exists && !sym.is(Private) && !symIsFixed && - !isOuterSelect) { // outer selects have effectively fixed symbols + !isOuterSelect) // outer selects have effectively fixed symbols val qualTpe = tree.qualifier.typeOpt val member = if (sym.is(Private)) qualTpe.member(tree.name) @@ -474,22 +441,19 @@ object TreeChecker { sym == mbr || sym.overriddenSymbol(mbr.owner.asClass) == mbr || mbr.overriddenSymbol(sym.owner.asClass) == sym), - i"""symbols differ for $tree + i"""symbols differ for $tree |was : $sym |alternatives by type: $memberSyms%, % of types ${memberSyms.map(_.info)}%, % |qualifier type : ${qualTpe} |tree type : ${tree.typeOpt} of class ${tree.typeOpt.getClass}""") - } checkNotRepeated(super.typedSelect(tree, pt)) - } - override def 
typedThis(tree: untpd.This)(using Context): Tree = { + override def typedThis(tree: untpd.This)(using Context): Tree = val res = super.typedThis(tree) val cls = res.symbol assert(cls.isStaticOwner || ctx.owner.isContainedIn(cls), i"error while typing $tree, ${ctx.owner} is not contained in $cls") res - } override def typedSuper(tree: untpd.Super, pt: Type)(using Context): Tree = assert(tree.qual.typeOpt.isInstanceOf[ThisType], i"expect prefix of Super to be This, actual = ${tree.qual}") @@ -538,21 +502,19 @@ object TreeChecker { typed(tree.expr, pt1) untpd.cpy.Typed(tree)(expr1, tpt1).withType(tree.typeOpt) - private def checkOwner(tree: untpd.Tree)(using Context): Unit = { + private def checkOwner(tree: untpd.Tree)(using Context): Unit = def ownerMatches(symOwner: Symbol, ctxOwner: Symbol): Boolean = symOwner == ctxOwner || ctxOwner.isWeakOwner && ownerMatches(symOwner, ctxOwner.owner) assert(ownerMatches(tree.symbol.owner, ctx.owner), i"bad owner; ${tree.symbol} has owner ${tree.symbol.owner}, expected was ${ctx.owner}\n" + i"owner chain = ${tree.symbol.ownersIterator.toList}%, %, ctxOwners = ${ctx.outersIterator.map(_.owner).toList}%, %") - } - override def typedTypeDef(tdef: untpd.TypeDef, sym: Symbol)(using Context): Tree = { + override def typedTypeDef(tdef: untpd.TypeDef, sym: Symbol)(using Context): Tree = assert(sym.info.isInstanceOf[ClassInfo | TypeBounds], i"wrong type, expect a template or type bounds for ${sym.fullName}, but found: ${sym.info}") super.typedTypeDef(tdef, sym) - } - override def typedClassDef(cdef: untpd.TypeDef, cls: ClassSymbol)(using Context): Tree = { + override def typedClassDef(cdef: untpd.TypeDef, cls: ClassSymbol)(using Context): Tree = val TypeDef(_, impl @ Template(constr, _, _, _)) = cdef: @unchecked assert(cdef.symbol == cls) assert(impl.symbol.owner == cls) @@ -579,7 +541,6 @@ object TreeChecker { i"defined: ${defined}%, %") super.typedClassDef(cdef, cls) - } override def typedDefDef(ddef: untpd.DefDef, sym: Symbol)(using 
Context): Tree = def defParamss = ddef.paramss.filter(!_.isEmpty).nestedMap(_.symbol) @@ -589,7 +550,7 @@ object TreeChecker { i"""param mismatch for ${sym.showLocated}: |defined in tree = ${layout(defParamss)} |stored in symbol = ${layout(sym.rawParamss)}""") - withDefinedSyms(ddef.paramss.flatten) { + withDefinedSyms(ddef.paramss.flatten): if (!sym.isClassConstructor && !(sym.name eq nme.STATIC_CONSTRUCTOR)) assert(isValidJVMMethodName(sym.name.encode), s"${sym.name.debugString} name is invalid on jvm") @@ -603,15 +564,13 @@ object TreeChecker { val tpdTree = super.typedDefDef(ddef, sym) assert(isMethodType(sym.info), i"wrong type, expect a method type for ${sym.fullName}, but found: ${sym.info}") tpdTree - } override def typedCase(tree: untpd.CaseDef, sel: Tree, selType: Type, pt: Type)(using Context): CaseDef = - withPatSyms(tpd.patVars(tree.pat.asInstanceOf[tpd.Tree])) { + withPatSyms(tpd.patVars(tree.pat.asInstanceOf[tpd.Tree])): super.typedCase(tree, sel, selType, pt) - } - override def typedClosure(tree: untpd.Closure, pt: Type)(using Context): Tree = { - if (!ctx.phase.lambdaLifted) nestingBlock match { + override def typedClosure(tree: untpd.Closure, pt: Type)(using Context): Tree = + if (!ctx.phase.lambdaLifted) nestingBlock match case block @ Block((meth : untpd.DefDef) :: Nil, closure: untpd.Closure) => assert(meth.symbol == closure.meth.symbol, "closure.meth symbol not equal to method symbol. Block: " + block.show) @@ -620,9 +579,7 @@ object TreeChecker { case null => assert(false, "function literal are not properly formed as a block of DefDef and Closure. 
Found: " + tree.show + " Nesting block: null") - } super.typedClosure(tree, pt) - } override def typedBlock(tree: untpd.Block, pt: Type)(using Context): Tree = withBlock(tree) { withDefinedSyms(tree.stats) { super.typedBlock(tree, pt) } } @@ -637,33 +594,28 @@ object TreeChecker { * is that we should be able to pull out an expression as an initializer * of a helper value without having to do a change owner traversal of the expression. */ - override def typedStats(trees: List[untpd.Tree], exprOwner: Symbol)(using Context): (List[Tree], Context) = { - for (tree <- trees) tree match { + override def typedStats(trees: List[untpd.Tree], exprOwner: Symbol)(using Context): (List[Tree], Context) = + for (tree <- trees) tree match case tree: untpd.DefTree => checkOwner(tree) case _: untpd.Thicket => assert(false, i"unexpanded thicket $tree in statement sequence $trees%\n%") case _ => - } super.typedStats(trees, exprOwner) - } - override def typedLabeled(tree: untpd.Labeled)(using Context): Labeled = { + override def typedLabeled(tree: untpd.Labeled)(using Context): Labeled = checkOwner(tree.bind) withDefinedSyms(tree.bind :: Nil) { super.typedLabeled(tree) } - } - override def typedReturn(tree: untpd.Return)(using Context): Return = { + override def typedReturn(tree: untpd.Return)(using Context): Return = val tree1 = super.typedReturn(tree) val from = tree1.from val fromSym = from.symbol if (fromSym.is(Label)) assertDefined(from) tree1 - } - override def typedWhileDo(tree: untpd.WhileDo)(using Context): Tree = { + override def typedWhileDo(tree: untpd.WhileDo)(using Context): Tree = assert((tree.cond ne EmptyTree) || ctx.phase.refChecked, i"invalid empty condition in while at $tree") super.typedWhileDo(tree) - } override def typedPackageDef(tree: untpd.PackageDef)(using Context): Tree = if tree.symbol == defn.StdLibPatchesPackage then @@ -699,7 +651,7 @@ object TreeChecker { assert(!tree.expr.isInstanceOf[untpd.Quote] || inInlineMethod, i"missed quote cancellation in 
$tree") super.typedSplice(tree, pt) - override def typedHole(tree: untpd.Hole, pt: Type)(using Context): Tree = { + override def typedHole(tree: untpd.Hole, pt: Type)(using Context): Tree = val tree1 @ Hole(isTerm, idx, args, content) = super.typedHole(tree, pt): @unchecked assert(idx >= 0, i"hole should not have negative index: $tree") @@ -736,21 +688,19 @@ object TreeChecker { assert(content.typeOpt =:= expectedContentType, i"unexpected content of hole\nexpected: ${expectedContentType}\nwas: ${content.typeOpt}") tree1 - } override def ensureNoLocalRefs(tree: Tree, pt: Type, localSyms: => List[Symbol])(using Context): Tree = tree - override def adapt(tree: Tree, pt: Type, locked: TypeVars)(using Context): Tree = { + override def adapt(tree: Tree, pt: Type, locked: TypeVars)(using Context): Tree = def isPrimaryConstructorReturn = ctx.owner.isPrimaryConstructor && pt.isRef(ctx.owner.owner) && tree.tpe.isRef(defn.UnitClass) - def infoStr(tp: Type) = tp match { + def infoStr(tp: Type) = tp match case tp: TypeRef => val sym = tp.symbol i"${sym.showLocated} with ${tp.designator}, flags = ${sym.flagsString}, underlying = ${tp.underlyingIterator.toList}%, %" case _ => "??" - } if (ctx.mode.isExpr && !tree.isEmpty && !isPrimaryConstructorReturn && @@ -763,10 +713,8 @@ object TreeChecker { |tree = $tree""".stripMargin }) tree - } override def simplify(tree: Tree, pt: Type, locked: TypeVars)(using Context): tree.type = tree - } /** Tree checker that can be applied to a local tree. 
*/ class LocalChecker(phasesToCheck: Seq[Phase]) extends Checker(phasesToCheck: Seq[Phase]): @@ -808,7 +756,7 @@ object TreeChecker { original ) - private[TreeChecker] def previousPhases(phases: List[Phase])(using Context): List[Phase] = phases match { + private[TreeChecker] def previousPhases(phases: List[Phase])(using Context): List[Phase] = phases match case (phase: MegaPhase) :: phases1 => val subPhases = phase.miniPhases val previousSubPhases = previousPhases(subPhases.toList) @@ -818,5 +766,3 @@ object TreeChecker { phase :: previousPhases(phases1) case _ => Nil - } -} diff --git a/compiler/src/dotty/tools/dotc/transform/TreeExtractors.scala b/compiler/src/dotty/tools/dotc/transform/TreeExtractors.scala index aec44d5987bf..11800a4505ee 100644 --- a/compiler/src/dotty/tools/dotc/transform/TreeExtractors.scala +++ b/compiler/src/dotty/tools/dotc/transform/TreeExtractors.scala @@ -6,39 +6,35 @@ import core._ import Contexts._, Trees._, Types._, StdNames._, Symbols._ import ValueClasses._ -object TreeExtractors { +object TreeExtractors: import tpd._ /** Match arg1.op(arg2) and extract (arg1, op.symbol, arg2) */ - object BinaryOp { - def unapply(t: Tree)(using Context): Option[(Tree, Symbol, Tree)] = t match { + object BinaryOp: + def unapply(t: Tree)(using Context): Option[(Tree, Symbol, Tree)] = t match case Apply(sel @ Select(arg1, _), List(arg2)) => Some((arg1, sel.symbol, arg2)) case _ => None - } - } /** Match new C(args) and extract (C, args). * Also admit new C(args): T and {new C(args)}. 
*/ - object NewWithArgs { - def unapply(t: Tree)(using Context): Option[(Type, List[Tree])] = t match { + object NewWithArgs: + def unapply(t: Tree)(using Context): Option[(Type, List[Tree])] = t match case Apply(Select(New(_), nme.CONSTRUCTOR), args) => Some((t.tpe, args)) case Typed(expr, _) => unapply(expr) case Block(Nil, expr) => unapply(expr) case _ => None - } - } /** For an instance v of a value class like: * class V(val underlying: X) extends AnyVal * Match v.underlying() and extract v */ - object ValueClassUnbox { - def unapply(t: Tree)(using Context): Option[Tree] = t match { + object ValueClassUnbox: + def unapply(t: Tree)(using Context): Option[Tree] = t match case Apply(sel @ Select(ref, _), Nil) => val sym = ref.tpe.widenDealias.typeSymbol if (isDerivedValueClass(sym) && (sel.symbol eq valueClassUnbox(sym.asClass))) @@ -47,6 +43,3 @@ object TreeExtractors { None case _ => None - } - } -} diff --git a/compiler/src/dotty/tools/dotc/transform/TryCatchPatterns.scala b/compiler/src/dotty/tools/dotc/transform/TryCatchPatterns.scala index 92d22b1cc57e..af4cc2561949 100644 --- a/compiler/src/dotty/tools/dotc/transform/TryCatchPatterns.scala +++ b/compiler/src/dotty/tools/dotc/transform/TryCatchPatterns.scala @@ -38,7 +38,7 @@ import dotty.tools.dotc.util.Spans.Span * - `case _: T =>` where `T` is not `Throwable` * */ -class TryCatchPatterns extends MiniPhase { +class TryCatchPatterns extends MiniPhase: import dotty.tools.dotc.ast.tpd._ override def phaseName: String = TryCatchPatterns.name @@ -47,31 +47,27 @@ class TryCatchPatterns extends MiniPhase { override def runsAfter: Set[String] = Set(ElimRepeated.name) - override def checkPostCondition(tree: Tree)(using Context): Unit = tree match { + override def checkPostCondition(tree: Tree)(using Context): Unit = tree match case Try(_, cases, _) => - cases.foreach { + cases.foreach: case CaseDef(Typed(_, _), guard, _) => assert(guard.isEmpty, "Try case should not contain a guard.") case CaseDef(Bind(_, _), 
guard, _) => assert(guard.isEmpty, "Try case should not contain a guard.") case c => assert(isDefaultCase(c), "Pattern in Try should be Bind, Typed or default case.") - } case _ => - } - override def transformTry(tree: Try)(using Context): Tree = { + override def transformTry(tree: Try)(using Context): Tree = val (tryCases, patternMatchCases) = tree.cases.span(isCatchCase) val fallbackCase = mkFallbackPatterMatchCase(patternMatchCases, tree.span) cpy.Try(tree)(cases = tryCases ++ fallbackCase) - } /** Is this pattern node a catch-all or type-test pattern? */ - private def isCatchCase(cdef: CaseDef)(using Context): Boolean = cdef match { + private def isCatchCase(cdef: CaseDef)(using Context): Boolean = cdef match case CaseDef(Typed(Ident(nme.WILDCARD), tpt), EmptyTree, _) => isSimpleThrowable(tpt.tpe) case CaseDef(Bind(_, Typed(Ident(nme.WILDCARD), tpt)), EmptyTree, _) => isSimpleThrowable(tpt.tpe) case _ => isDefaultCase(cdef) - } - private def isSimpleThrowable(tp: Type)(using Context): Boolean = tp.stripped match { + private def isSimpleThrowable(tp: Type)(using Context): Boolean = tp.stripped match case tp @ TypeRef(pre, _) => (pre == NoPrefix || pre.typeSymbol.isStatic) && // Does not require outer class check !tp.symbol.is(Flags.Trait) && // Traits not supported by JVM @@ -80,12 +76,11 @@ class TryCatchPatterns extends MiniPhase { isSimpleThrowable(tp.tycon) case _ => false - } private def mkFallbackPatterMatchCase(patternMatchCases: List[CaseDef], span: Span)( implicit ctx: Context): Option[CaseDef] = if (patternMatchCases.isEmpty) None - else { + else val exName = ExceptionBinderName.fresh() val fallbackSelector = newSymbol(ctx.owner, exName, Flags.Synthetic | Flags.Case, defn.ThrowableType, coord = span) @@ -96,8 +91,6 @@ class TryCatchPatterns extends MiniPhase { EmptyTree, transformFollowing(Match(sel, patternMatchCases ::: rethrow :: Nil))) ) - } -} object TryCatchPatterns: val name: String = "tryCatchPatterns" diff --git 
a/compiler/src/dotty/tools/dotc/transform/TupleOptimizations.scala b/compiler/src/dotty/tools/dotc/transform/TupleOptimizations.scala index 6fba0bca4ce3..8fdb085f62c7 100644 --- a/compiler/src/dotty/tools/dotc/transform/TupleOptimizations.scala +++ b/compiler/src/dotty/tools/dotc/transform/TupleOptimizations.scala @@ -14,7 +14,7 @@ import dotty.tools.dotc.ast.tpd /** Optimize generic operations on tuples */ -class TupleOptimizations extends MiniPhase with IdentityDenotTransformer { +class TupleOptimizations extends MiniPhase with IdentityDenotTransformer: import tpd._ override def phaseName: String = TupleOptimizations.name @@ -31,9 +31,9 @@ class TupleOptimizations extends MiniPhase with IdentityDenotTransformer { else if (tree.symbol == defn.RuntimeTuples_toArray) transformTupleToArray(tree) else tree - private def transformTupleCons(tree: tpd.Apply)(using Context): Tree = { + private def transformTupleCons(tree: tpd.Apply)(using Context): Tree = val head :: tail :: Nil = tree.args: @unchecked - defn.tupleTypes(tree.tpe.widenTermRefExpr.dealias) match { + defn.tupleTypes(tree.tpe.widenTermRefExpr.dealias) match case Some(tpes) => // Generate a the tuple directly with TupleN+1.apply val size = tpes.size @@ -44,24 +44,21 @@ class TupleOptimizations extends MiniPhase with IdentityDenotTransformer { val elements = head :: tupleSelectors(tup, size - 1) knownTupleFromElements(tpes, elements) } - else { + else // val it = Iterator.single(head) ++ tail.asInstanceOf[Product].productIterator // TupleN+1(it.next(), ..., it.next()) val fullIterator = ref(defn.RuntimeTuples_consIterator).appliedToTermArgs(head :: tail :: Nil) evalOnce(fullIterator) { it => knownTupleFromIterator(tpes.length, it).asInstance(tree.tpe) } - } case _ => // No optimization, keep: // scala.runtime.Tuples.cons(tail, head) tree - } - } - private def transformTupleTail(tree: tpd.Apply)(using Context): Tree = { + private def transformTupleTail(tree: tpd.Apply)(using Context): Tree = val Apply(_, tup :: 
Nil) = tree: @unchecked - defn.tupleTypes(tup.tpe.widenTermRefExpr.dealias, MaxTupleArity + 1) match { + defn.tupleTypes(tup.tpe.widenTermRefExpr.dealias, MaxTupleArity + 1) match case Some(tpes) => // Generate a the tuple directly with TupleN-1.apply val size = tpes.size @@ -93,18 +90,15 @@ class TupleOptimizations extends MiniPhase with IdentityDenotTransformer { // No optimization, keep: // scala.runtime.Tuples.tail(tup) tree - } - } private def transformTupleSize(tree: tpd.Apply)(using Context): Tree = - tree.tpe.tryNormalize match { + tree.tpe.tryNormalize match case tp: ConstantType => Literal(tp.value) case _ => tree - } - private def transformTupleConcat(tree: tpd.Apply)(using Context): Tree = { + private def transformTupleConcat(tree: tpd.Apply)(using Context): Tree = val Apply(_, self :: that :: Nil) = tree: @unchecked - (defn.tupleTypes(self.tpe.widenTermRefExpr.dealias), defn.tupleTypes(that.tpe.widenTermRefExpr.dealias)) match { + (defn.tupleTypes(self.tpe.widenTermRefExpr.dealias), defn.tupleTypes(that.tpe.widenTermRefExpr.dealias)) match case (Some(tpes1), Some(tpes2)) => // Generate a the tuple directly with TupleN+M.apply val n = tpes1.size @@ -122,32 +116,28 @@ class TupleOptimizations extends MiniPhase with IdentityDenotTransformer { knownTupleFromElements(types, elements) } } - else { + else // val it = self.asInstanceOf[Product].productIterator ++ that.asInstanceOf[Product].productIterator // TupleN+M(it.next(), ..., it.next()) val fullIterator = ref(defn.RuntimeTuples_concatIterator).appliedToTermArgs(tree.args) evalOnce(fullIterator) { it => knownTupleFromIterator(n + m, it).asInstance(tree.tpe) } - } case _ => // No optimization, keep: // scala.runtime.Tuples.cons(self, that) tree - } - } - private def transformTupleApply(tree: tpd.Apply)(using Context): Tree = { + private def transformTupleApply(tree: tpd.Apply)(using Context): Tree = val Apply(_, tup :: nTree :: Nil) = tree: @unchecked - (defn.tupleTypes(tup.tpe.widenTermRefExpr.dealias), 
nTree.tpe) match { + (defn.tupleTypes(tup.tpe.widenTermRefExpr.dealias), nTree.tpe) match case (Some(tpes), nTpe: ConstantType) => // Get the element directly with TupleM._n+1 or TupleXXL.productElement(n) val size = tpes.size val n = nTpe.value.intValue - if (n < 0 || n >= size) { + if (n < 0 || n >= size) report.error(em"index out of bounds: $n", nTree.underlyingArgument.srcPos) tree - } else if (size <= MaxTupleArity) // tup._n Typed(tup, TypeTree(defn.tupleType(tpes))).select(nme.selectorName(n)) @@ -161,12 +151,10 @@ class TupleOptimizations extends MiniPhase with IdentityDenotTransformer { // No optimization, keep: // scala.runtime.Tuples.apply(tup, n) tree - } - } - private def transformTupleToArray(tree: tpd.Apply)(using Context): Tree = { + private def transformTupleToArray(tree: tpd.Apply)(using Context): Tree = val Apply(_, tup :: Nil) = tree: @unchecked - defn.tupleTypes(tup.tpe.widen, MaxTupleArity) match { + defn.tupleTypes(tup.tpe.widen, MaxTupleArity) match case Some(tpes) => val size = tpes.size if (size == 0) @@ -182,22 +170,19 @@ class TupleOptimizations extends MiniPhase with IdentityDenotTransformer { // No optimization, keep: // scala.runtime.Tuples.toArray(tup) tree - } - } /** Create a TupleN (1 <= N < 23) from the elements */ - private def knownTupleFromElements(tpes: List[Type], elements: List[Tree])(using Context) = { + private def knownTupleFromElements(tpes: List[Type], elements: List[Tree])(using Context) = val size = elements.size assert(0 < size && size <= MaxTupleArity) val tupleModule = defn.TupleType(size).nn.classSymbol.companionModule ref(tupleModule).select(nme.apply).appliedToTypes(tpes).appliedToTermArgs(elements) - } private def knownTupleFromIterator(size: Int, it: Tree)(using Context): Tree = if (size == 0) // EmptyTuple for empty tuple ref(defn.EmptyTupleModule.termRef) // TODO should this code be here? 
Or assert(size > specializedSize) - else if (size <= MaxTupleArity) { + else if (size <= MaxTupleArity) // TupleN(it.next(), ..., it.next()) // TODO outline this code for the 22 alternatives (or less, may not need the smallest ones)? @@ -206,7 +191,6 @@ class TupleOptimizations extends MiniPhase with IdentityDenotTransformer { val tpes = List.fill(size)(defn.AnyType) val elements = (0 until size).map(_ => it.select(nme.next)).toList knownTupleFromElements(tpes, elements) - } else // No optimization, keep: // TupleXXL.fromIterator(it) @@ -214,7 +198,6 @@ class TupleOptimizations extends MiniPhase with IdentityDenotTransformer { private def tupleSelectors(tup: Tree, size: Int)(using Context): List[Tree] = (0 until size).map(i => tup.select(nme.selectorName(i))).toList -} object TupleOptimizations: val name: String = "genericTuples" diff --git a/compiler/src/dotty/tools/dotc/transform/TypeTestsCasts.scala b/compiler/src/dotty/tools/dotc/transform/TypeTestsCasts.scala index f5cb8eab73a4..b0dda64b1176 100644 --- a/compiler/src/dotty/tools/dotc/transform/TypeTestsCasts.scala +++ b/compiler/src/dotty/tools/dotc/transform/TypeTestsCasts.scala @@ -28,7 +28,7 @@ import patmat.Typ * Unfortunately this phase ended up being not Y-checkable unless types are erased. A cast to an ConstantType(3) or x.type * cannot be rewritten before erasure. That's why TypeTestsCasts is called from Erasure. */ -object TypeTestsCasts { +object TypeTestsCasts: import ast.tpd._ import typer.Inferencing.maximizeType import typer.ProtoTypes.constrained @@ -56,7 +56,7 @@ object TypeTestsCasts { * 9. if `X` is `T1 | T2`, checkable(T1, P) && checkable(T2, P). * 10. 
otherwise, "" */ - def whyUncheckable(X: Type, P: Type, span: Span)(using Context): String = atPhase(Phases.refchecksPhase.next) { + def whyUncheckable(X: Type, P: Type, span: Span)(using Context): String = atPhase(Phases.refchecksPhase.next): extension (inline s1: String) inline def &&(inline s2: String): String = if s1 == "" then s2 else s1 extension (inline b: Boolean) inline def |||(inline s: String): String = if b then "" else s @@ -64,23 +64,21 @@ object TypeTestsCasts { def isAbstract(P: Type) = !P.dealias.typeSymbol.isClass def replaceP(tp: Type)(using Context) = new TypeMap { - def apply(tp: Type) = tp match { + def apply(tp: Type) = tp match case tref: TypeRef if tref.typeSymbol.isPatternBound => WildcardType case AnnotatedType(_, annot) if annot.symbol == defn.UncheckedAnnot => WildcardType case _ => mapOver(tp) - } }.apply(tp) /** Returns true if the type arguments of `P` can be determined from `X` */ - def typeArgsTrivial(X: Type, P: AppliedType)(using Context) = inContext(ctx.fresh.setExploreTyperState().setFreshGADTBounds) { + def typeArgsTrivial(X: Type, P: AppliedType)(using Context) = inContext(ctx.fresh.setExploreTyperState().setFreshGADTBounds): val AppliedType(tycon, _) = P - def underlyingLambda(tp: Type): TypeLambda = tp.ensureLambdaSub match { + def underlyingLambda(tp: Type): TypeLambda = tp.ensureLambdaSub match case tp: TypeLambda => tp case tp: TypeProxy => underlyingLambda(tp.superType) - } val typeLambda = underlyingLambda(tycon) val tvars = constrained(typeLambda, untpd.EmptyTree, alwaysAddTypeVars = true)._2.map(_.tpe) val P1 = tycon.appliedTo(tvars) @@ -95,7 +93,7 @@ object TypeTestsCasts { // conforms to the type skeleton pre.F[_]. Then it goes on to check // if P1 <: P, which means the type arguments in P are trivial, // thus no runtime checks are needed for them. - withMode(Mode.GadtConstraintInference) { + withMode(Mode.GadtConstraintInference): // Why not widen type arguments here? 
Given the following program // // trait Tree[-T] class Ident[-T] extends Tree[T] @@ -112,7 +110,6 @@ object TypeTestsCasts { debug.println( TypeComparer.explained(_.constrainPatternType(P1, X, forceInvariantRefinement = true)) ) - } // Maximization of the type means we try to cover all possible values // which conform to the skeleton pre.F[_] and X. Then we have to make @@ -130,20 +127,18 @@ object TypeTestsCasts { res - } - def recur(X: Type, P: Type): String = trace(s"recur(${X.show}, ${P.show})") { + def recur(X: Type, P: Type): String = trace(s"recur(${X.show}, ${P.show})"): (X <:< P) ||| P.dealias.match case _: SingletonType => "" case _: TypeProxy if isAbstract(P) => i"it refers to an abstract type member or type parameter" case defn.ArrayOf(tpT) => - X match { + X match case defn.ArrayOf(tpE) => recur(tpE, tpT) case _ => recur(defn.AnyType, tpT) - } case tpe @ AppliedType(tycon, targs) => - X.widenDealias match { + X.widenDealias match case OrType(tp1, tp2) => // This case is required to retrofit type inference, // which cut constraints in the following two cases: @@ -157,7 +152,6 @@ object TypeTestsCasts { TypeComparer.provablyDisjoint(x, tpe.derivedAppliedType(tycon, targs.map(_ => WildcardType))) || typeArgsTrivial(X, tpe) ||| i"its type arguments can't be determined from $X" - } case AndType(tp1, tp2) => recur(X, tp1) && recur(X, tp2) case OrType(tp1, tp2) => recur(X, tp1) && recur(X, tp2) case AnnotatedType(t, _) => recur(X, t) @@ -168,20 +162,18 @@ object TypeTestsCasts { if P.classSymbol.isLocal && foundClasses(X).exists(P.classSymbol.isInaccessibleChildOf) => // 8 i"it's a local class" case _ => "" - } val res = recur(X.widen, replaceP(P)) debug.println(i"checking $X isInstanceOf $P = $res") res - } - def interceptTypeApply(tree: TypeApply)(using Context): Tree = trace(s"transforming ${tree.show}", show = true) { + def interceptTypeApply(tree: TypeApply)(using Context): Tree = trace(s"transforming ${tree.show}", show = true): /** Intercept 
`expr.xyz[XYZ]` */ def interceptWith(expr: Tree): Tree = if (expr.isEmpty) tree - else { + else val sym = tree.symbol def isPrimitive(tp: Type) = tp.classSymbol.isPrimitiveValueClass @@ -195,16 +187,15 @@ object TypeTestsCasts { def transformIsInstanceOf( expr: Tree, testType: Type, - unboxedTestType: Type, flagUnrelated: Boolean): Tree = { + unboxedTestType: Type, flagUnrelated: Boolean): Tree = def testCls = effectiveClass(testType.widen) def unboxedTestCls = effectiveClass(unboxedTestType.widen) - def unreachable(why: => String)(using Context): Boolean = { + def unreachable(why: => String)(using Context): Boolean = if (flagUnrelated) if (inMatch) report.error(em"this case is unreachable since $why", expr.srcPos) else report.warning(em"this will always yield false since $why", expr.srcPos) false - } /** Are `foundCls` and `testCls` classes that allow checks * whether a test would be always false? @@ -228,7 +219,7 @@ object TypeTestsCasts { def exprType = i"type ${expr.tpe.widen.stripped}" def check(foundCls: Symbol): Boolean = if (!isCheckable(foundCls)) true - else if (!foundCls.derivesFrom(testCls)) { + else if (!foundCls.derivesFrom(testCls)) val unrelated = !testCls.derivesFrom(foundCls) && !unboxedTestCls.derivesFrom(foundCls) @@ -238,7 +229,6 @@ object TypeTestsCasts { else if (unrelated) unreachable(i"$exprType and $testCls are unrelated") else true - } else true end check @@ -253,16 +243,15 @@ object TypeTestsCasts { if (expr.tpe <:< testType) && inMatch then if expr.tpe.isNotNull then constant(expr, Literal(Constant(true))) else expr.testNotNull - else { + else if expr.tpe.isBottomType then report.warning(TypeTestAlwaysDiverges(expr.tpe, testType), tree.srcPos) val nestedCtx = ctx.fresh.setNewTyperState() val foundClsSyms = foundClasses(expr.tpe.widen) val sensical = checkSensical(foundClsSyms)(using nestedCtx) - if (!sensical) { + if (!sensical) nestedCtx.typerState.commit() constant(expr, Literal(Constant(false))) - } else if 
(testCls.isPrimitiveValueClass) foundClsSyms match case List(cls) if cls.isPrimitiveValueClass => @@ -272,15 +261,12 @@ object TypeTestsCasts { expr, defn.boxedType(testCls.typeRef), testCls.typeRef, flagUnrelated) else derivedTree(expr, defn.Any_isInstanceOf, testType) - } - } - def transformAsInstanceOf(testType: Type): Tree = { + def transformAsInstanceOf(testType: Type): Tree = def testCls = effectiveClass(testType.widen) - def foundClsSymPrimitive = { + def foundClsSymPrimitive = val foundClsSyms = foundClasses(expr.tpe.widen) foundClsSyms.size == 1 && foundClsSyms.head.isPrimitiveValueClass - } if (erasure(expr.tpe) <:< testType) Typed(expr, tree.args.head) // Replace cast by type ascription (which does not generate any bytecode) else if (testCls eq defn.BoxedUnitClass) @@ -293,16 +279,14 @@ object TypeTestsCasts { unbox(expr.ensureConforms(defn.ObjectType), testType) else if (isDerivedValueClass(testCls)) expr // adaptToType in Erasure will do the necessary type adaptation - else if (testCls eq defn.NothingClass) { + else if (testCls eq defn.NothingClass) // In the JVM `x.asInstanceOf[Nothing]` would throw a class cast exception except when `x eq null`. // To avoid this loophole we execute `x` and then regardless of the result throw a `ClassCastException` val throwCCE = Throw(New(defn.ClassCastExceptionClass.typeRef, defn.ClassCastExceptionClass_stringConstructor, Literal(Constant("Cannot cast to scala.Nothing")) :: Nil)) Block(expr :: Nil, throwCCE).withSpan(expr.span) - } else derivedTree(expr, defn.Any_asInstanceOf, testType) - } /** Transform isInstanceOf * @@ -316,7 +300,7 @@ object TypeTestsCasts { * The transform happens before erasure of `testType`, thus cannot be merged * with `transformIsInstanceOf`, which depends on erased type of `testType`. 
*/ - def transformTypeTest(expr: Tree, testType: Type, flagUnrelated: Boolean): Tree = testType.dealias match { + def transformTypeTest(expr: Tree, testType: Type, flagUnrelated: Boolean): Tree = testType.dealias match case tref: TermRef if tref.symbol == defn.EmptyTupleModule => ref(defn.RuntimeTuples_isInstanceOfEmptyTuple).appliedTo(expr) case _: SingletonType => @@ -354,9 +338,8 @@ object TypeTestsCasts { case _ => val erasedTestType = erasure(testType) transformIsInstanceOf(expr, erasedTestType, erasedTestType, flagUnrelated) - } - if (sym.isTypeTest) { + if (sym.isTypeTest) val argType = tree.args.head.tpe val isTrusted = tree.hasAttachment(PatternMatcher.TrustedTypeTestKey) val isUnchecked = expr.tpe.widenTermRefExpr.hasAnnotation(defn.UncheckedAnnot) @@ -366,21 +349,17 @@ object TypeTestsCasts { report.uncheckedWarning(em"the type test for $argType cannot be checked at runtime because $whyNot", expr.srcPos) transformTypeTest(expr, argType, flagUnrelated = enclosingInlineds.isEmpty) // if test comes from inlined code, dont't flag it even if it always false - } else if (sym.isTypeCast) transformAsInstanceOf(erasure(tree.args.head.tpe)) else tree - } - val expr = tree.fun match { + val expr = tree.fun match case Select(expr, _) => expr case i: Ident => val expr = desugarIdentPrefix(i) if (expr.isEmpty) expr else expr.withSpan(i.span) case _ => EmptyTree - } interceptWith(expr) - } private def effectiveClass(tp: Type)(using Context): Symbol = if tp.isRef(defn.PairClass) then effectiveClass(erasure(tp)) @@ -393,4 +372,3 @@ object TypeTestsCasts { case AndType(tp1, tp2) => (for t1 <- go(tp1, Nil); t2 <- go(tp2, Nil) yield AndType(t1, t2)) ::: acc case _ => tp :: acc go(tp, Nil).map(effectiveClass) -} diff --git a/compiler/src/dotty/tools/dotc/transform/TypeUtils.scala b/compiler/src/dotty/tools/dotc/transform/TypeUtils.scala index a897503ef275..d6fd9dbe6e22 100644 --- a/compiler/src/dotty/tools/dotc/transform/TypeUtils.scala +++ 
b/compiler/src/dotty/tools/dotc/transform/TypeUtils.scala @@ -11,11 +11,11 @@ import Names.Name import dotty.tools.dotc.core.Decorators.* -object TypeUtils { +object TypeUtils: /** A decorator that provides methods on types * that are needed in the transformer pipeline. */ - extension (self: Type) { + extension (self: Type) def isErasedValueType(using Context): Boolean = self.isInstanceOf[ErasedValueType] @@ -39,20 +39,18 @@ object TypeUtils { def isByName: Boolean = self.isInstanceOf[ExprType] - def ensureMethodic(using Context): Type = self match { + def ensureMethodic(using Context): Type = self match case self: MethodicType => self case _ => if (ctx.erasedTypes) MethodType(Nil, self) else ExprType(self) - } - def widenToParents(using Context): Type = self.parents match { + def widenToParents(using Context): Type = self.parents match case Nil => self case ps => ps.reduceLeft(AndType(_, _)) - } /** The arity of this tuple type, which can be made up of EmptyTuple, TupleX and `*:` pairs, * or -1 if this is not a tuple type. 
*/ - def tupleArity(using Context): Int = self/*.dealias*/ match { // TODO: why does dealias cause a failure in tests/run-deep-subtype/Tuple-toArray.scala + def tupleArity(using Context): Int = self/*.dealias*/ match // TODO: why does dealias cause a failure in tests/run-deep-subtype/Tuple-toArray.scala case AppliedType(tycon, _ :: tl :: Nil) if tycon.isRef(defn.PairClass) => val arity = tl.tupleArity if (arity < 0) arity else arity + 1 @@ -65,10 +63,9 @@ object TypeUtils { case _ => if defn.isTupleNType(self) then self.dealias.argInfos.length else -1 - } /** The element types of this tuple type, which can be made up of EmptyTuple, TupleX and `*:` pairs */ - def tupleElementTypes(using Context): Option[List[Type]] = self.dealias match { + def tupleElementTypes(using Context): Option[List[Type]] = self.dealias match case AppliedType(tycon, hd :: tl :: Nil) if tycon.isRef(defn.PairClass) => tl.tupleElementTypes.map(hd :: _) case self: SingletonType => @@ -82,7 +79,6 @@ object TypeUtils { case _ => if defn.isTupleClass(self.typeSymbol) then Some(self.dealias.argInfos) else None - } /** The `*:` equivalent of an instance of a Tuple class */ def toNestedPairs(using Context): Type = @@ -95,7 +91,7 @@ object TypeUtils { /** The TermRef referring to the companion of the underlying class reference * of this type, while keeping the same prefix. */ - def mirrorCompanionRef(using Context): TermRef = self match { + def mirrorCompanionRef(using Context): TermRef = self match case AndType(tp1, tp2) => val c1 = tp1.classSymbol val c2 = tp2.classSymbol @@ -105,7 +101,6 @@ object TypeUtils { prefix.select(self.symbol.companionModule).asInstanceOf[TermRef] case self: TypeProxy => self.superType.mirrorCompanionRef - } /** Is this type a methodic type that takes at least one parameter? 
*/ def takesParams(using Context): Boolean = self.stripPoly match @@ -116,5 +111,3 @@ object TypeUtils { def takesImplicitParams(using Context): Boolean = self.stripPoly match case mt: MethodType => mt.isImplicitMethod || mt.resType.takesImplicitParams case _ => false - } -} diff --git a/compiler/src/dotty/tools/dotc/transform/VCElideAllocations.scala b/compiler/src/dotty/tools/dotc/transform/VCElideAllocations.scala index 879a885d626e..5229f40ea931 100644 --- a/compiler/src/dotty/tools/dotc/transform/VCElideAllocations.scala +++ b/compiler/src/dotty/tools/dotc/transform/VCElideAllocations.scala @@ -15,7 +15,7 @@ import TreeExtractors._, ValueClasses._ * new V(u1) == new V(u2) => u1 == u2 provided V does not redefine `equals` * (new V(u)).underlying() => u */ -class VCElideAllocations extends MiniPhase with IdentityDenotTransformer { +class VCElideAllocations extends MiniPhase with IdentityDenotTransformer: import tpd._ override def phaseName: String = VCElideAllocations.name @@ -26,12 +26,11 @@ class VCElideAllocations extends MiniPhase with IdentityDenotTransformer { override def transformApply(tree: Apply)(using Context): Tree = def hasUserDefinedEquals(tp: Type): Boolean = - val eql = atPhase(erasurePhase) { + val eql = atPhase(erasurePhase): defn.Any_equals.matchingMember(tp.typeSymbol.thisType) - } eql.owner != defn.AnyClass && !eql.is(Synthetic) - tree match { + tree match // new V(u1) == new V(u2) => u1 == u2, unless V defines its own equals. 
// (We don't handle != because it has been eliminated by InterceptedMethods) case BinaryOp(NewWithArgs(tp1, List(u1)), op, NewWithArgs(tp2, List(u2))) @@ -47,8 +46,6 @@ class VCElideAllocations extends MiniPhase with IdentityDenotTransformer { case _ => tree - } -} object VCElideAllocations: val name: String = "vcElideAllocations" diff --git a/compiler/src/dotty/tools/dotc/transform/VCInlineMethods.scala b/compiler/src/dotty/tools/dotc/transform/VCInlineMethods.scala index 219945d4ebb1..534d30313bed 100644 --- a/compiler/src/dotty/tools/dotc/transform/VCInlineMethods.scala +++ b/compiler/src/dotty/tools/dotc/transform/VCInlineMethods.scala @@ -39,7 +39,7 @@ import ExtensionMethods._, ValueClasses._ * methods (like [[TypeSpecializer]]), this way [[VCInlineMethods]] does not * need to have any knowledge of the name mangling done by other phases. */ -class VCInlineMethods extends MiniPhase with IdentityDenotTransformer { +class VCInlineMethods extends MiniPhase with IdentityDenotTransformer: import tpd._ override def phaseName: String = VCInlineMethods.name @@ -58,7 +58,7 @@ class VCInlineMethods extends MiniPhase with IdentityDenotTransformer { */ private def rewire(tree: Tree, mtArgs: List[Tree] = Nil, mArgss: List[List[Tree]] = Nil) (using Context): Tree = - tree match { + tree match case Apply(qual, mArgs) => rewire(qual, mtArgs, mArgs :: mArgss) case TypeApply(qual, mtArgs2) => @@ -85,12 +85,11 @@ class VCInlineMethods extends MiniPhase with IdentityDenotTransformer { .appliedToTypeTrees(mtArgs) .appliedTo(qual) .appliedToArgss(mArgss) - } /** If this tree corresponds to a fully-applied value class method call, replace it * by a call to the corresponding extension method, otherwise return it as is. 
*/ - private def rewireIfNeeded(tree: Tree)(using Context) = tree.tpe.widen match { + private def rewireIfNeeded(tree: Tree)(using Context) = tree.tpe.widen match case tp: LambdaType => tree // The rewiring will be handled by a fully-applied parent node case _ => @@ -98,7 +97,6 @@ class VCInlineMethods extends MiniPhase with IdentityDenotTransformer { rewire(tree).ensureConforms(tree.tpe).withSpan(tree.span) else tree - } override def transformSelect(tree: Select)(using Context): Tree = rewireIfNeeded(tree) @@ -106,7 +104,6 @@ class VCInlineMethods extends MiniPhase with IdentityDenotTransformer { rewireIfNeeded(tree) override def transformApply(tree: Apply)(using Context): Tree = rewireIfNeeded(tree) -} object VCInlineMethods: val name: String = "vcInlineMethods" diff --git a/compiler/src/dotty/tools/dotc/transform/ValueClasses.scala b/compiler/src/dotty/tools/dotc/transform/ValueClasses.scala index 28d1255eaa72..12a668605887 100644 --- a/compiler/src/dotty/tools/dotc/transform/ValueClasses.scala +++ b/compiler/src/dotty/tools/dotc/transform/ValueClasses.scala @@ -11,15 +11,14 @@ import StdNames._ import SymUtils._ /** Methods that apply to user-defined value classes */ -object ValueClasses { +object ValueClasses: - def isDerivedValueClass(sym: Symbol)(using Context): Boolean = sym.isClass && { + def isDerivedValueClass(sym: Symbol)(using Context): Boolean = sym.isClass `&&`: val d = sym.denot !d.isRefinementClass && d.isValueClass && (d.initial.symbol ne defn.AnyValClass) && // Compare the initial symbol because AnyVal does not exist after erasure !d.isPrimitiveValueClass - } def isMethodWithExtension(sym: Symbol)(using Context): Boolean = val d = sym.denot.initial @@ -53,4 +52,3 @@ object ValueClasses { /** The unboxed type that underlies a derived value class */ def underlyingOfValueClass(sym: ClassSymbol)(using Context): Type = valueClassUnbox(sym).info.resultType -} diff --git a/compiler/src/dotty/tools/dotc/transform/YCheckPositions.scala 
b/compiler/src/dotty/tools/dotc/transform/YCheckPositions.scala index 8080a7c911b3..51176179174c 100644 --- a/compiler/src/dotty/tools/dotc/transform/YCheckPositions.scala +++ b/compiler/src/dotty/tools/dotc/transform/YCheckPositions.scala @@ -10,7 +10,7 @@ import dotty.tools.dotc.core.Symbols._ import dotty.tools.dotc.util.SourceFile /** Ycheck inlined positions */ -class YCheckPositions extends Phase { +class YCheckPositions extends Phase: import tpd._ override def phaseName: String = YCheckPositions.name @@ -20,11 +20,11 @@ class YCheckPositions extends Phase { override def run(using Context): Unit = () // YCheck only override def checkPostCondition(tree: Tree)(using Context): Unit = - tree match { + tree match case PackageDef(pid, _) if tree.symbol.owner == defn.RootClass => - val checker = new TreeTraverser { + val checker = new TreeTraverser: private var sources: List[SourceFile] = ctx.source :: Nil - def traverse(tree: tpd.Tree)(using Context): Unit = { + def traverse(tree: tpd.Tree)(using Context): Unit = // Check current context is correct assert(ctx.source == sources.head) @@ -36,8 +36,8 @@ class YCheckPositions extends Phase { assert(tree.source == currentSource, i"wrong source set for $tree # ${tree.uniqueId} of ${tree.getClass}, set to ${tree.source} but context had $currentSource\n ${tree.symbol.flagsString}") // Recursivlely check children while keeping track of current source - reporting.trace(i"check pos ${tree.getClass} ${tree.source} ${sources.head} $tree") { - tree match { + reporting.trace(i"check pos ${tree.getClass} ${tree.source} ${sources.head} $tree"): + tree match case Inlined(EmptyTree, bindings, expansion) => assert(bindings.isEmpty) val old = sources @@ -51,13 +51,8 @@ class YCheckPositions extends Phase { traverse(expansion)(using inlineContext(call).withSource(sources.head)) sources = sources.tail case _ => traverseChildren(tree) - } - } - } - } checker.traverse(tree) case _ => - } private def isMacro(call: Tree)(using Context) = 
call.symbol.is(Macro) || @@ -66,7 +61,6 @@ class YCheckPositions extends Phase { // TODO remove this distinction once Inline nodes of expanded macros can be trusted (also in Inliner.inlineCallTrace) (!(ctx.phase <= postTyperPhase) && call.isInstanceOf[Select]) -} object YCheckPositions: val name: String = "inlinedPositions" diff --git a/compiler/src/dotty/tools/dotc/transform/init/Checker.scala b/compiler/src/dotty/tools/dotc/transform/init/Checker.scala index 1efb3c88149e..3ff2e56b42de 100644 --- a/compiler/src/dotty/tools/dotc/transform/init/Checker.scala +++ b/compiler/src/dotty/tools/dotc/transform/init/Checker.scala @@ -51,7 +51,7 @@ class Checker extends Phase: override def traverse(tree: Tree)(using Context): Unit = traverseChildren(tree) - tree match { + tree match case mdef: MemberDef => // self-type annotation ValDef has no symbol if mdef.name != nme.WILDCARD then @@ -64,7 +64,6 @@ class Checker extends Phase: case _ => case _ => - } end InitTreeTraverser object Checker: diff --git a/compiler/src/dotty/tools/dotc/transform/init/Errors.scala b/compiler/src/dotty/tools/dotc/transform/init/Errors.scala index 366fd6be96a2..66496fe48df7 100644 --- a/compiler/src/dotty/tools/dotc/transform/init/Errors.scala +++ b/compiler/src/dotty/tools/dotc/transform/init/Errors.scala @@ -62,10 +62,9 @@ object Errors: case class UnsafePromotion(msg: String, error: Error)(val trace: Trace) extends Error: def show(using Context): String = msg + stacktrace + "\n" + - "Promoting the value to transitively initialized (Hot) failed due to the following problem:\n" + { + "Promoting the value to transitively initialized (Hot) failed due to the following problem:\n" `+`: val ctx2 = ctx.withProperty(IsFromPromotion, Some(true)) error.show(using ctx2) - } /** Unsafe leaking a non-hot value as constructor arguments * diff --git a/compiler/src/dotty/tools/dotc/transform/init/Semantic.scala b/compiler/src/dotty/tools/dotc/transform/init/Semantic.scala index 4548dccb598f..4af3910b2a5c 100644 
--- a/compiler/src/dotty/tools/dotc/transform/init/Semantic.scala +++ b/compiler/src/dotty/tools/dotc/transform/init/Semantic.scala @@ -124,14 +124,13 @@ object Semantic: * After populating class parameters and outers, it is possible to lazily * compute the field values in class bodies when they are accessed. */ - private def populateParams(): Contextual[this.type] = log("populating parameters", printer, (_: Warm).objekt.toString) { + private def populateParams(): Contextual[this.type] = log("populating parameters", printer, (_: Warm).objekt.toString): assert(!populatingParams, "the object is already populating parameters") populatingParams = true val tpl = klass.defTree.asInstanceOf[TypeDef].rhs.asInstanceOf[Template] extendTrace(klass.defTree) { this.callConstructor(ctor, args.map(arg => new ArgInfo(arg, trace))) } populatingParams = false this - } def ensureObjectExistsAndPopulated(): Contextual[this.type] = if cache.containsObject(this) then this @@ -514,7 +513,7 @@ object Semantic: * * Invariant: fields are immutable and only set once */ - def updateField(field: Symbol, value: Value): Contextual[Unit] = log("set field " + field + " of " + ref + " to " + value) { + def updateField(field: Symbol, value: Value): Contextual[Unit] = log("set field " + field + " of " + ref + " to " + value): val obj = objekt // We may reset the outers or params of a populated warm object. 
// This is the case if we need access the field of a warm object, which @@ -528,19 +527,17 @@ object Semantic: assert(!obj.hasField(field) || isParamUpdate, field.show + " already init, new = " + value + ", old = " + obj.field(field) + ", ref = " + ref) val obj2 = obj.copy(fields = obj.fields.updated(field, value)) if changed then cache.updateObject(ref, obj2) - } /** Update the immediate outer of the given `klass` of the abstract object * * Invariant: outers are immutable and only set once */ - def updateOuter(klass: ClassSymbol, value: Value): Contextual[Unit] = log("set outer " + klass + " of " + ref + " to " + value) { + def updateOuter(klass: ClassSymbol, value: Value): Contextual[Unit] = log("set outer " + klass + " of " + ref + " to " + value): val obj = objekt // See the comment in `updateField` for setting the value twice. assert(!obj.hasOuter(klass) || obj.outer(klass) == value, klass.show + " already has outer, new = " + value + ", old = " + obj.outer(klass) + ", ref = " + ref) val obj2 = obj.copy(outers = obj.outers.updated(klass, value)) cache.updateObject(ref, obj2) - } end extension extension (value: Value) @@ -548,7 +545,7 @@ object Semantic: value.promote(msg) value - def select(field: Symbol, receiver: Type, needResolve: Boolean = true): Contextual[Value] = log("select " + field.show + ", this = " + value, printer, (_: Value).show) { + def select(field: Symbol, receiver: Type, needResolve: Boolean = true): Contextual[Value] = log("select " + field.show + ", this = " + value, printer, (_: Value).show): if promoted.isCurrentObjectPromoted then Hot else value match case Hot => @@ -602,9 +599,8 @@ object Semantic: case RefSet(refs) => refs.map(_.select(field, receiver)).join - } - def call(meth: Symbol, args: List[ArgInfo], receiver: Type, superType: Type, needResolve: Boolean = true): Contextual[Value] = log("call " + meth.show + ", args = " + args.map(_.value.show), printer, (_: Value).show) { + def call(meth: Symbol, args: List[ArgInfo], receiver: 
Type, superType: Type, needResolve: Boolean = true): Contextual[Value] = log("call " + meth.show + ", args = " + args.map(_.value.show), printer, (_: Value).show): def promoteArgs(): Contextual[Unit] = args.foreach(_.promote) def isSyntheticApply(meth: Symbol) = @@ -649,7 +645,7 @@ object Semantic: if promoted.isCurrentObjectPromoted then Hot else if isAlwaysSafe(meth) then Hot else if meth eq defn.Any_asInstanceOf then value - else value match { + else value match case Hot => if isSyntheticApply(meth) && meth.hasSource then val klass = meth.owner.companionClass.asClass @@ -700,18 +696,16 @@ object Semantic: outer.instantiate(klass, klass.primaryConstructor, args) else reporter.reportAll(tryReporter.errors) - extendTrace(ddef) { + extendTrace(ddef): eval(ddef.rhs, ref, cls, cacheResult = true) - } else if ref.canIgnoreMethodCall(target) then Hot else // no source code available promoteArgs() // try promoting the receiver as last resort - val hasErrors = Reporter.hasErrors { + val hasErrors = Reporter.hasErrors: ref.promote(ref.show + " has no source code and is not provably transitively initialized (Hot).") - } if hasErrors then val error = CallUnknown(target)(trace) reporter.report(error) @@ -741,10 +735,8 @@ object Semantic: case RefSet(refs) => refs.map(_.call(meth, args, receiver, superType)).join - } - } - def callConstructor(ctor: Symbol, args: List[ArgInfo]): Contextual[Value] = log("call " + ctor.show + ", args = " + args.map(_.value.show), printer, (_: Value).show) { + def callConstructor(ctor: Symbol, args: List[ArgInfo]): Contextual[Value] = log("call " + ctor.show + ", args = " + args.map(_.value.show), printer, (_: Value).show): // init "fake" param fields for parameters of primary and secondary constructors def addParamsAsFields(args: List[Value], ref: Ref, ctorDef: DefDef) = val params = ctorDef.termParamss.flatten.map(_.symbol) @@ -753,7 +745,7 @@ object Semantic: ref.updateField(param, value) printer.println(param.show + " initialized with " + 
value) - value match { + value match case Hot | Cold | _: RefSet | _: Fun => report.error("[Internal error] unexpected constructor call, meth = " + ctor + ", value = " + value + Trace.show, Trace.position) Hot @@ -795,18 +787,15 @@ object Semantic: val error = CallUnknown(ctor)(trace) reporter.report(error) Hot - } - } /** Handle a new expression `new p.C` where `p` is abstracted by `value` */ - def instantiate(klass: ClassSymbol, ctor: Symbol, args: List[ArgInfo]): Contextual[Value] = log("instantiating " + klass.show + ", value = " + value + ", args = " + args.map(_.value.show), printer, (_: Value).show) { + def instantiate(klass: ClassSymbol, ctor: Symbol, args: List[ArgInfo]): Contextual[Value] = log("instantiating " + klass.show + ", value = " + value + ", args = " + args.map(_.value.show), printer, (_: Value).show): def tryLeak(warm: Warm, nonHotOuterClass: Symbol, argValues: List[Value]): Contextual[Value] = val argInfos2 = args.zip(argValues).map { (argInfo, v) => argInfo.copy(value = v) } - val errors = Reporter.stopEarly { + val errors = Reporter.stopEarly: given Trace = Trace.empty warm.callConstructor(ctor, argInfos2) - } if errors.nonEmpty then val indices = for @@ -822,7 +811,7 @@ object Semantic: warm if promoted.isCurrentObjectPromoted then Hot - else value match { + else value match case Hot => var allHot = true val args2 = args.map { arg => @@ -867,8 +856,6 @@ object Semantic: case RefSet(refs) => refs.map(_.instantiate(klass, ctor, args)).join - } - } end extension extension (ref: Ref) @@ -887,7 +874,7 @@ object Semantic: else if sym.is(Flags.Param) then Hot else - sym.defTree match { + sym.defTree match case vdef: ValDef => // resolve this for local variable val enclosingClass = sym.owner.enclosingClass.asClass @@ -905,7 +892,6 @@ object Semantic: end match case _ => Hot - } end extension // ----- Promotion ---------------------------------------------------- @@ -920,10 +906,10 @@ object Semantic: * object freely, as its fields or outers may 
still reach uninitialized * objects. */ - def isFullyFilled: Contextual[Boolean] = log("isFullyFilled " + ref, printer) { + def isFullyFilled: Contextual[Boolean] = log("isFullyFilled " + ref, printer): val obj = ref.objekt ref.klass.baseClasses.forall { klass => - !klass.hasSource || { + !klass.hasSource `||`: val nonInits = klass.info.decls.filter { member => !member.isOneOf(Flags.Method | Flags.Lazy | Flags.Deferred) && !member.isType @@ -931,9 +917,7 @@ object Semantic: } printer.println("nonInits = " + nonInits) nonInits.isEmpty - } } - } def nonInitFields(): Contextual[List[Symbol]] = val obj = ref.objekt @@ -951,7 +935,7 @@ object Semantic: end extension extension (thisRef: ThisRef) - def tryPromoteCurrentObject(): Contextual[Boolean] = log("tryPromoteCurrentObject ", printer) { + def tryPromoteCurrentObject(): Contextual[Boolean] = log("tryPromoteCurrentObject ", printer): if promoted.isCurrentObjectPromoted then true else if thisRef.isFullyFilled then @@ -960,11 +944,10 @@ object Semantic: true else false - } extension (value: Value) /** Promotion of values to hot */ - def promote(msg: String): Contextual[Unit] = log("promoting " + value + ", promoted = " + promoted, printer) { + def promote(msg: String): Contextual[Unit] = log("promoting " + value + ", promoted = " + promoted, printer): if !promoted.isCurrentObjectPromoted then value match @@ -990,14 +973,12 @@ object Semantic: case fun @ Fun(body, thisV, klass) => if !promoted.contains(fun) then - val errors = Reporter.stopEarly { - val res = { + val errors = Reporter.stopEarly: + val res = given Trace = Trace.empty eval(body, thisV, klass, cacheResult = true) - } given Trace = Trace.empty.add(body) res.promote("Only transitively initialized (Hot) values can be returned by functions. 
The function " + fun.show + " returns " + res.show + ".") - } if errors.nonEmpty then reporter.report(UnsafePromotion(msg, errors.head)(trace)) else @@ -1005,7 +986,6 @@ object Semantic: case RefSet(refs) => refs.foreach(_.promote(msg)) - } end extension extension (warm: Warm) @@ -1023,18 +1003,17 @@ object Semantic: * reports a warning to avoid expensive checks. * */ - def tryPromote(msg: String): Contextual[List[Error]] = log("promote " + warm.show + ", promoted = " + promoted, printer) { + def tryPromote(msg: String): Contextual[List[Error]] = log("promote " + warm.show + ", promoted = " + promoted, printer): val obj = warm.objekt def doPromote(klass: ClassSymbol, subClass: ClassSymbol, subClassSegmentHot: Boolean)(using Reporter): Unit = val outer = obj.outer(klass) - val isHotSegment = outer.isHot && { + val isHotSegment = outer.isHot `&&`: val ctor = klass.primaryConstructor val ctorDef = ctor.defTree.asInstanceOf[DefDef] val params = ctorDef.termParamss.flatten.map(_.symbol) // We have cached all parameters on the object params.forall(param => obj.field(param).isHot) - } // Check invariant: subClassSegmentHot ==> isHotSegment // @@ -1056,14 +1035,12 @@ object Semantic: if member.is(Flags.Method, butNot = Flags.Accessor) then val args = member.info.paramInfoss.flatten.map(_ => new ArgInfo(Hot: Value, Trace.empty)) val res = warm.call(member, args, receiver = warm.klass.typeRef, superType = NoType) - withTrace(trace.add(member.defTree)) { + withTrace(trace.add(member.defTree)): res.promote("Could not verify that the return value of " + member.show + " is transitively initialized (Hot). It was found to be " + res.show + ".") - } else val res = warm.select(member, receiver = warm.klass.typeRef) - withTrace(trace.add(member.defTree)) { + withTrace(trace.add(member.defTree)): res.promote("Could not verify that the field " + member.show + " is transitively initialized (Hot). 
It was found to be " + res.show + ".") - } end for // Promote parents @@ -1077,13 +1054,11 @@ object Semantic: for mixin <- mixins if mixin.hasSource do doPromote(mixin.asClass, klass, isHotSegment) end doPromote - val errors = Reporter.stopEarly { + val errors = Reporter.stopEarly: doPromote(warm.klass, subClass = warm.klass, subClassSegmentHot = false) - } if errors.isEmpty then Nil else UnsafePromotion(msg, errors.head)(trace) :: Nil - } end extension @@ -1110,7 +1085,7 @@ object Semantic: val tpl = classSym.defTree.asInstanceOf[TypeDef].rhs.asInstanceOf[Template] @tailrec - def iterate(): Unit = { + def iterate(): Unit = given Promoted = Promoted.empty(classSym) given Trace = Trace.empty.add(classSym.defTree) given reporter: Reporter.BufferedReporter = new Reporter.BufferedReporter @@ -1130,7 +1105,6 @@ object Semantic: iterate() else cache.prepareForNextClass() - } iterate() end checkClass @@ -1147,9 +1121,8 @@ object Semantic: type ArgInfo = TraceValue[Value] extension (arg: ArgInfo) - def promote: Contextual[Unit] = withTrace(arg.trace) { + def promote: Contextual[Unit] = withTrace(arg.trace): arg.value.promote("Could not verify that the method argument is transitively initialized (Hot). It was found to be " + arg.value.show + ". Only transitively initialized arguments may be passed to methods (except constructors).") - } /** Evaluate an expression with the given value for `this` in a given class `klass` * @@ -1171,9 +1144,8 @@ object Semantic: * @param klass The enclosing class where the expression is located. * @param cacheResult It is used to reduce the size of the cache. 
*/ - def eval(expr: Tree, thisV: Ref, klass: ClassSymbol, cacheResult: Boolean = false): Contextual[Value] = log("evaluating " + expr.show + ", this = " + thisV.show + " in " + klass.show, printer, (_: Value).show) { + def eval(expr: Tree, thisV: Ref, klass: ClassSymbol, cacheResult: Boolean = false): Contextual[Value] = log("evaluating " + expr.show + ", this = " + thisV.show + " in " + klass.show, printer, (_: Value).show): cache.cachedEval(thisV, expr, cacheResult, default = Hot) { expr => cases(expr, thisV, klass) } - } /** Evaluate a list of expressions */ def eval(exprs: List[Tree], thisV: Ref, klass: ClassSymbol): Contextual[List[Value]] = @@ -1217,10 +1189,9 @@ object Semantic: val args = evalArgs(argss.flatten, thisV, klass) val cls = tref.classSymbol.asClass - withTrace(trace2) { + withTrace(trace2): val outer = outerValue(tref, thisV, klass) outer.instantiate(cls, ctor, args) - } case Call(ref, argss) => // check args @@ -1261,9 +1232,8 @@ object Semantic: case OuterSelectName(_, _) => val current = qualifier.tpe.classSymbol val target = expr.tpe.widenSingleton.classSymbol.asClass - withTrace(trace2) { + withTrace(trace2): resolveThis(target, qual, current.asClass) - } case _ => withTrace(trace2) { qual.select(expr.symbol, receiver = qualifier.tpe) } @@ -1288,14 +1258,12 @@ object Semantic: case Select(qual, _) => eval(qual, thisV, klass) val res = eval(rhs, thisV, klass) - extendTrace(expr) { + extendTrace(expr): res.ensureHot("The RHS of reassignment must be transitively initialized (Hot). It was found to be " + res.show + ". ") - } case id: Ident => val res = eval(rhs, thisV, klass) - extendTrace(expr) { + extendTrace(expr): res.ensureHot("The RHS of reassignment must be transitively initialized (Hot). It was found to be " + res.show + ". 
") - } case closureDef(ddef) => Fun(ddef.rhs, thisV, klass) @@ -1316,16 +1284,14 @@ object Semantic: case Match(selector, cases) => val res = eval(selector, thisV, klass) - extendTrace(selector) { + extendTrace(selector): res.ensureHot("The value to be matched needs to be transitively initialized (Hot). It was found to be " + res.show + ". ") - } eval(cases.map(_.body), thisV, klass).join case Return(expr, from) => val res = eval(expr, thisV, klass) - extendTrace(expr) { + extendTrace(expr): res.ensureHot("return expression must be transitively initialized (Hot). It was found to be " + res.show + ". ") - } case WhileDo(cond, body) => eval(cond :: body :: Nil, thisV, klass) @@ -1385,7 +1351,7 @@ object Semantic: * @param thisV The value for `C.this` where `C` is represented by the parameter `klass`. * @param klass The enclosing class where the type `tp` is located. */ - def cases(tp: Type, thisV: Ref, klass: ClassSymbol): Contextual[Value] = log("evaluating " + tp.show, printer, (_: Value).show) { + def cases(tp: Type, thisV: Ref, klass: ClassSymbol): Contextual[Value] = log("evaluating " + tp.show, printer, (_: Value).show): tp match case _: ConstantType => Hot @@ -1420,7 +1386,6 @@ object Semantic: case _ => report.error("[Internal error] unexpected type " + tp + Trace.show, Trace.position) Hot - } /** Resolve C.this that appear in `klass` * @@ -1428,7 +1393,7 @@ object Semantic: * @param thisV The value for `D.this` where `D` is represented by the parameter `klass`. * @param klass The enclosing class where the type `C.this` is located. 
*/ - def resolveThis(target: ClassSymbol, thisV: Value, klass: ClassSymbol): Contextual[Value] = log("resolving " + target.show + ", this = " + thisV.show + " in " + klass.show, printer, (_: Value).show) { + def resolveThis(target: ClassSymbol, thisV: Value, klass: ClassSymbol): Contextual[Value] = log("resolving " + target.show + ", this = " + thisV.show + " in " + klass.show, printer, (_: Value).show): if target == klass then thisV else if target.is(Flags.Package) then Hot else @@ -1450,7 +1415,6 @@ object Semantic: Cold case Cold => Cold - } /** Compute the outer value that correspond to `tref.prefix` * @@ -1474,7 +1438,7 @@ object Semantic: * @param thisV The value of the current object to be initialized. * @param klass The class to which the template belongs. */ - def init(tpl: Template, thisV: Ref, klass: ClassSymbol): Contextual[Value] = log("init " + klass.show, printer, (_: Value).show) { + def init(tpl: Template, thisV: Ref, klass: ClassSymbol): Contextual[Value] = log("init " + klass.show, printer, (_: Value).show): val paramsMap = tpl.constr.termParamss.flatten.map { vdef => vdef.name -> thisV.objekt.field(vdef.symbol) }.toMap @@ -1558,9 +1522,8 @@ object Semantic: // To avoid crash we supply hot values for erroneous parent calls. // See tests/neg/i16438.scala. 
val args: List[ArgInfo] = ctor.info.paramInfoss.flatten.map(_ => new ArgInfo(Hot, Trace.empty)) - extendTrace(superParent) { + extendTrace(superParent): superCall(tref, ctor, args, tasks) - } } // initialize super classes after outers are set @@ -1570,7 +1533,7 @@ object Semantic: var fieldsChanged = true // class body - if thisV.isThisRef || !thisV.asInstanceOf[Warm].isPopulatingParams then tpl.body.foreach { + if thisV.isThisRef || !thisV.asInstanceOf[Warm].isPopulatingParams then tpl.body.foreach: case vdef : ValDef if !vdef.symbol.is(Flags.Lazy) && !vdef.rhs.isEmpty => val res = eval(vdef.rhs, thisV, klass) // TODO: Improve promotion to avoid handling enum initialization specially @@ -1592,7 +1555,6 @@ object Semantic: thisV.asInstanceOf[ThisRef].tryPromoteCurrentObject() fieldsChanged = false eval(tree, thisV, klass) - } // ensure we try promotion once even if class body is empty if fieldsChanged && thisV.isThisRef then @@ -1600,7 +1562,6 @@ object Semantic: // The result value is ignored, use Hot to avoid futile fixed point computation Hot - } /** Check that path in path-dependent types are initialized * diff --git a/compiler/src/dotty/tools/dotc/transform/init/Trace.scala b/compiler/src/dotty/tools/dotc/transform/init/Trace.scala index 7dfbc0b6cfa5..ac2f6abeaf6e 100644 --- a/compiler/src/dotty/tools/dotc/transform/init/Trace.scala +++ b/compiler/src/dotty/tools/dotc/transform/init/Trace.scala @@ -37,7 +37,7 @@ object Trace: inline def extendTrace[T](node: Tree)(using t: Trace)(op: Trace ?=> T): T = op(using t.add(node)) - def buildStacktrace(trace: Trace, preamble: String)(using Context): String = if trace.isEmpty then "" else preamble + { + def buildStacktrace(trace: Trace, preamble: String)(using Context): String = if trace.isEmpty then "" else preamble `+`: var lastLineNum = -1 var lines: mutable.ArrayBuffer[String] = new mutable.ArrayBuffer trace.foreach { tree => @@ -66,7 +66,6 @@ object Trace: val sb = new StringBuilder for line <- lines do 
sb.append(line) sb.toString - } /** Used to underline source positions in the stack trace * pos.source must exist diff --git a/compiler/src/dotty/tools/dotc/transform/init/Util.scala b/compiler/src/dotty/tools/dotc/transform/init/Util.scala index 4e60c1325b09..571b449c9204 100644 --- a/compiler/src/dotty/tools/dotc/transform/init/Util.scala +++ b/compiler/src/dotty/tools/dotc/transform/init/Util.scala @@ -39,10 +39,9 @@ object Util: case Apply(fn, args) => val argTps = fn.tpe.widen match case mt: MethodType => mt.paramInfos - val normArgs: List[Arg] = args.zip(argTps).map { + val normArgs: List[Arg] = args.zip(argTps).map: case (arg, _: ExprType) => ByNameArg(arg) case (arg, _) => arg - } unapply(fn) match case Some((ref, args0)) => Some((ref, args0 :+ normArgs)) case None => None @@ -75,10 +74,9 @@ object Util: case _ => None - def resolve(cls: ClassSymbol, sym: Symbol)(using Context): Symbol = log("resove " + cls + ", " + sym, printer, (_: Symbol).show) { + def resolve(cls: ClassSymbol, sym: Symbol)(using Context): Symbol = log("resove " + cls + ", " + sym, printer, (_: Symbol).show): if (sym.isEffectivelyFinal || sym.isConstructor) sym else sym.matchingMember(cls.appliedRef) - } extension (sym: Symbol) @@ -90,13 +88,12 @@ object Util: def isConcreteClass(cls: ClassSymbol)(using Context) = val instantiable: Boolean = cls.is(Flags.Module) || - !cls.isOneOf(Flags.AbstractOrTrait) && { + !cls.isOneOf(Flags.AbstractOrTrait) `&&`: // see `Checking.checkInstantiable` in typer val tp = cls.appliedRef val stp = SkolemType(tp) val selfType = cls.givenSelfType.asSeenFrom(stp, cls) !selfType.exists || stp <:< selfType - } // A concrete class may not be instantiated if the self type is not satisfied instantiable && cls.enclosingPackageClass != defn.StdLibPatchesPackage.moduleClass diff --git a/compiler/src/dotty/tools/dotc/transform/localopt/FormatChecker.scala b/compiler/src/dotty/tools/dotc/transform/localopt/FormatChecker.scala index ff8d89920791..fb27f4278ad7 100644 --- 
a/compiler/src/dotty/tools/dotc/transform/localopt/FormatChecker.scala +++ b/compiler/src/dotty/tools/dotc/transform/localopt/FormatChecker.scala @@ -32,11 +32,10 @@ class TypedFormatChecker(partsElems: List[Tree], parts: List[String], args: List val tpe = argTypes(argi) types.find(t => argConformsTo(argi, tpe, t)) .orElse(types.find(t => argConvertsTo(argi, tpe, t))) - .getOrElse { + .getOrElse: report.argError(s"Found: ${tpe.show}, Required: ${types.map(_.show).mkString(", ")}", argi) actuals += args(argi) types.head - } object formattableTypes: val FormattableType = requiredClassRef("java.util.Formattable") @@ -48,11 +47,10 @@ class TypedFormatChecker(partsElems: List[Tree], parts: List[String], args: List def argConformsTo(argi: Int, arg: Type, target: Type): Boolean = (arg <:< target).tap(if _ then actuals += args(argi)) def argConvertsTo(argi: Int, arg: Type, target: Type): Boolean = import typer.Implicits.SearchSuccess - atPhase(typerPhase) { + atPhase(typerPhase): ctx.typer.inferView(args(argi), target) match case SearchSuccess(view, ref, _, _) => actuals += view ; true case _ => false - } // match a conversion specifier val formatPattern = """%(?:(\d+)\$)?([-#+ 0,(<]+)?(\d+)?(\.\d+)?([tT]?[%a-zA-Z])?""".r @@ -211,9 +209,8 @@ class TypedFormatChecker(partsElems: List[Tree], parts: List[String], args: List kind match case BooleanXn => arg == defn.BooleanType orElse warningAt(CC)("Boolean format is null test for non-Boolean") case IntegralXn => - arg == BigIntType || !cond(cc) { + arg == BigIntType || !cond(cc): case 'o' | 'x' | 'X' if hasAnyFlag("+ (") => "+ (".filter(hasFlag).foreach(bad => badFlag(bad, s"only use '$bad' for BigInt conversions to o, x, X")) ; true - } case _ => true // what arg type if any does the conversion accept diff --git a/compiler/src/dotty/tools/dotc/transform/localopt/StringInterpolatorOpt.scala b/compiler/src/dotty/tools/dotc/transform/localopt/StringInterpolatorOpt.scala index 5cad7ba72831..e65696590040 100644 --- 
a/compiler/src/dotty/tools/dotc/transform/localopt/StringInterpolatorOpt.scala +++ b/compiler/src/dotty/tools/dotc/transform/localopt/StringInterpolatorOpt.scala @@ -61,8 +61,8 @@ class StringInterpolatorOpt extends MiniPhase: def unapply(t: Throwable): Option[Int] = t match case iee: StringContext.InvalidEscapeException => Some(iee.index) case iae: IllegalArgumentException => iae.getMessage() match - case s"""invalid unicode escape at index $index of $_""" => index.toIntOption - case _ => None + case s"""invalid unicode escape at index $index of $_""" => index.toIntOption + case _ => None case _ => None /** Match trees that resemble s and raw string interpolations. In the case of the s diff --git a/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala b/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala index 7238756454b3..4f33ded180ed 100644 --- a/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala +++ b/compiler/src/dotty/tools/dotc/transform/patmat/Space.scala @@ -69,9 +69,8 @@ sealed trait Space: if (a ne a2) || (b ne b2) then a2.isSubspace(b2) else if a == Empty then true else if b == Empty then false - else trace(s"isSubspace(${show(this)}, ${show(b)})", debug) { + else trace(s"isSubspace(${show(this)}, ${show(b)})", debug): isSubspaceCache.getOrElseUpdate(b, computeIsSubspace(a, b)) - } @sharable private var mySimplified: Space | Null = null @@ -115,7 +114,7 @@ case class Prod(tp: Type, unappTp: TermRef, params: List[Space]) extends Space /** Union of spaces */ case class Or(spaces: Seq[Space]) extends Space -object SpaceEngine { +object SpaceEngine: import tpd._ def simplify(space: Space)(using Context): Space = space.simplify @@ -147,16 +146,14 @@ object SpaceEngine { */ def dedup(spaces: Seq[Space])(using Context): Seq[Space] = if (spaces.lengthCompare(1) <= 0 || spaces.lengthCompare(10) >= 0) spaces - else { - val res = spaces.map(sp => (sp, spaces.filter(_ ne sp))).find { + else + val res = spaces.map(sp => (sp, spaces.filter(_ ne 
sp))).find: case (sp, sps) => isSubspace(sp, Or(LazyList(sps: _*))) - } if (res.isEmpty) spaces else res.get._2 - } /** Flatten space to get rid of `Or` for pretty print */ - def flatten(space: Space)(using Context): Seq[Space] = space match { + def flatten(space: Space)(using Context): Seq[Space] = space match case Prod(tp, fun, spaces) => val ss = LazyList(spaces: _*).map(flatten) @@ -172,14 +169,13 @@ object SpaceEngine { case _ => List(space) - } /** Is `a` a subspace of `b`? Equivalent to `simplify(simplify(a) - simplify(b)) == Empty`, but faster */ - def computeIsSubspace(a: Space, b: Space)(using Context): Boolean = { + def computeIsSubspace(a: Space, b: Space)(using Context): Boolean = val a2 = simplify(a) val b2 = simplify(b) if (a ne a2) || (b ne b2) then isSubspace(a2, b2) - else (a, b) match { + else (a, b) match case (Empty, _) => true case (_, Empty) => false case (Or(ss), _) => ss.forall(isSubspace(_, b)) @@ -199,12 +195,10 @@ object SpaceEngine { && isSubspace(Prod(tp2, fun, signature(fun, tp1, ss.length).map(Typ(_, false))), b) case (Prod(_, fun1, ss1), Prod(_, fun2, ss2)) => isSameUnapply(fun1, fun2) && ss1.lazyZip(ss2).forall(isSubspace) - } - } /** Intersection of two spaces */ - def intersect(a: Space, b: Space)(using Context): Space = trace(s"${show(a)} & ${show(b)}", debug, show) { - (a, b) match { + def intersect(a: Space, b: Space)(using Context): Space = trace(s"${show(a)} & ${show(b)}", debug, show): + (a, b) match case (Empty, _) | (_, Empty) => Empty case (_, Or(ss)) => Or(ss.map(intersect(a, _)).filter(_ ne Empty)) case (Or(ss), _) => Or(ss.map(intersect(_, b)).filter(_ ne Empty)) @@ -228,12 +222,10 @@ object SpaceEngine { if !isSameUnapply(fun1, fun2) then intersectUnrelatedAtomicTypes(tp1, tp2)(a) else if ss1.lazyZip(ss2).exists((a, b) => simplify(intersect(a, b)) == Empty) then Empty else Prod(tp1, fun1, ss1.lazyZip(ss2).map(intersect)) - } - } /** The space of a not covered by b */ - def minus(a: Space, b: Space)(using Context): 
Space = trace(s"${show(a)} - ${show(b)}", debug, show) { - (a, b) match { + def minus(a: Space, b: Space)(using Context): Space = trace(s"${show(a)} - ${show(b)}", debug, show): + (a, b) match case (Empty, _) => Empty case (_, Empty) => a case (Or(ss), _) => Or(ss.map(minus(_, b))) @@ -275,13 +267,11 @@ object SpaceEngine { flatten(sub(i)).map(s => Prod(tp1, fun1, ss1.updated(i, s))) } Or(spaces) - } - } /** Is the unapply or unapplySeq irrefutable? * @param unapp The unapply function reference */ - def isIrrefutable(unapp: TermRef, argLen: Int)(using Context): Boolean = { + def isIrrefutable(unapp: TermRef, argLen: Int)(using Context): Boolean = val unappResult = unapp.widen.finalResultType unappResult.isRef(defn.SomeClass) || unappResult <:< ConstantType(Constant(true)) // only for unapply @@ -293,23 +283,21 @@ object SpaceEngine { isEmptyTp <:< ConstantType(Constant(false)) } || unappResult.derivesFrom(defn.NonEmptyTupleClass) - } /** Is the unapply or unapplySeq irrefutable? * @param unapp The unapply function tree */ - def isIrrefutable(unapp: tpd.Tree, argLen: Int)(using Context): Boolean = { + def isIrrefutable(unapp: tpd.Tree, argLen: Int)(using Context): Boolean = tpd.funPart(unapp).tpe match case funRef: TermRef => isIrrefutable(funRef, argLen) case _: ErrorType => false - } /** Is this an `'{..}` or `'[..]` irrefutable quoted patterns? 
* @param unapp The unapply function tree * @param implicits The implicits of the unapply * @param pt The scrutinee type */ - def isIrrefutableQuotedPattern(unapp: tpd.Tree, implicits: List[tpd.Tree], pt: Type)(using Context): Boolean = { + def isIrrefutableQuotedPattern(unapp: tpd.Tree, implicits: List[tpd.Tree], pt: Type)(using Context): Boolean = implicits.headOption match // pattern '{ $x: T } case Some(tpd.Apply(tpd.Select(tpd.Quote(tpd.TypeApply(fn, List(tpt)), _), nme.apply), _)) @@ -323,14 +311,13 @@ object SpaceEngine { pt =:= defn.QuotedTypeClass.typeRef.appliedTo(tpt.tpe) case _ => false - } /** Return a space containing the values of both types. * * The types should be atomic (non-decomposable) and unrelated (neither * should be a subtype of the other). */ - def intersectUnrelatedAtomicTypes(tp1: Type, tp2: Type)(sp: Space)(using Context): Space = trace(i"atomic intersection: ${AndType(tp1, tp2)}", debug) { + def intersectUnrelatedAtomicTypes(tp1: Type, tp2: Type)(sp: Space)(using Context): Space = trace(i"atomic intersection: ${AndType(tp1, tp2)}", debug): // Precondition: !isSubType(tp1, tp2) && !isSubType(tp2, tp1). if !ctx.mode.is(Mode.SafeNulls) && (tp1.isNullType || tp2.isNullType) then // Since projections of types don't include null, intersection with null is empty. @@ -344,7 +331,6 @@ object SpaceEngine { else if isPrimToBox(tp1, tp2) || isPrimToBox(tp2, tp1) then intersection else if TypeComparer.provablyDisjoint(tp1, tp2) then Empty else intersection - } /** Return the space that represents the pattern `pat` */ def project(pat: Tree)(using Context): Space = trace(i"project($pat ${pat.className} ${pat.tpe})", debug, show)(pat match { @@ -381,12 +367,11 @@ object SpaceEngine { // Doing so with a pattern of `case Seq() =>` with a scrutinee of type `Vector()` doesn't work because the // space is then discarded leading to a false positive reachability warning, see #13931. 
projectSeq(pats) - else { + else if (elemTp.exists) Prod(erase(pat.tpe.stripAnnots, isValue = false), funRef, projectSeq(pats) :: Nil) else Prod(erase(pat.tpe.stripAnnots, isValue = false), funRef, pats.take(arity - 1).map(project) :+ projectSeq(pats.drop(arity - 1))) - } else Prod(erase(pat.tpe.stripAnnots, isValue = false), funRef, pats.map(project)) @@ -410,22 +395,19 @@ object SpaceEngine { Typ(pat.tpe.narrow, decomposed = false) }) - private def project(tp: Type)(using Context): Space = tp match { + private def project(tp: Type)(using Context): Space = tp match case OrType(tp1, tp2) => Or(project(tp1) :: project(tp2) :: Nil) case tp => Typ(tp, decomposed = true) - } - private def unapplySeqInfo(resTp: Type, pos: SrcPos)(using Context): (Int, Type, Type) = { + private def unapplySeqInfo(resTp: Type, pos: SrcPos)(using Context): (Int, Type, Type) = var resultTp = resTp var elemTp = unapplySeqTypeElemTp(resultTp) var arity = productArity(resultTp, pos) - if (!elemTp.exists && arity <= 0) { + if (!elemTp.exists && arity <= 0) resultTp = resTp.select(nme.get).finalResultType elemTp = unapplySeqTypeElemTp(resultTp.widen) arity = productSelectorTypes(resultTp, pos).size - } (arity, elemTp, resultTp) - } /** Erase pattern bound types with WildcardType * @@ -500,7 +482,7 @@ object SpaceEngine { /** Space of the pattern: unapplySeq(a, b, c: _*) */ - def projectSeq(pats: List[Tree])(using Context): Space = { + def projectSeq(pats: List[Tree])(using Context): Space = if (pats.isEmpty) return Typ(defn.NilType, false) val (items, zero) = if (isWildcardStarArg(pats.last)) @@ -513,17 +495,15 @@ object SpaceEngine { val consTp = defn.ConsType.appliedTo(pats.head.tpe.widen) Prod(consTp, unapplyTp, project(pat) :: acc :: Nil) } - } def isPrimToBox(tp: Type, pt: Type)(using Context): Boolean = tp.isPrimitiveValueType && (defn.boxedType(tp).classSymbol eq pt.classSymbol) /** Is `tp1` a subtype of `tp2`? 
*/ - def isSubType(tp1: Type, tp2: Type)(using Context): Boolean = trace(i"$tp1 <:< $tp2", debug, show = true) { + def isSubType(tp1: Type, tp2: Type)(using Context): Boolean = trace(i"$tp1 <:< $tp2", debug, show = true): if tp1 == ConstantType(Constant(null)) && !ctx.mode.is(Mode.SafeNulls) then tp2 == ConstantType(Constant(null)) else tp1 <:< tp2 - } /** True if we can assume that the two unapply methods are the same. * That is, given the same parameter, they return the same result. @@ -538,12 +518,12 @@ object SpaceEngine { /** Return term parameter types of the extractor `unapp`. * Parameter types of the case class type `tp`. Adapted from `unapplyPlan` in patternMatcher */ - def signature(unapp: TermRef, scrutineeTp: Type, argLen: Int)(using Context): List[Type] = { + def signature(unapp: TermRef, scrutineeTp: Type, argLen: Int)(using Context): List[Type] = val unappSym = unapp.symbol // println("scrutineeTp = " + scrutineeTp.show) - val mt: MethodType = unapp.widen match { + val mt: MethodType = unapp.widen match case mt: MethodType => mt case pt: PolyType => val tvars = pt.paramInfos.map(newTypeVar(_)) @@ -555,7 +535,6 @@ object SpaceEngine { instantiateSelected(mt, tvars) isFullyDefined(mt, ForceDegree.all) mt - } // Case unapply: // 1. 
return types of constructor fields if the extractor is synthesized for Scala2 case classes & length match @@ -572,48 +551,40 @@ object SpaceEngine { val sig = if (resTp.isRef(defn.BooleanClass)) List() - else { + else val isUnapplySeq = unappSym.name == nme.unapplySeq - if (isUnapplySeq) { + if (isUnapplySeq) val (arity, elemTp, resultTp) = unapplySeqInfo(resTp, unappSym.srcPos) if (elemTp.exists) defn.ListType.appliedTo(elemTp) :: Nil - else { + else val sels = productSeqSelectors(resultTp, arity, unappSym.srcPos) sels.init :+ defn.ListType.appliedTo(sels.last) - } - } - else { + else val arity = productArity(resTp, unappSym.srcPos) if (arity > 0) productSelectorTypes(resTp, unappSym.srcPos) - else { + else val getTp = resTp.select(nme.get).finalResultType.widenTermRefExpr if (argLen == 1) getTp :: Nil else productSelectorTypes(getTp, unappSym.srcPos) - } - } - } debug.println(s"signature of ${unappSym.showFullName} ----> ${sig.map(_.show).mkString(", ")}") sig.map(_.annotatedToRepeated) - } /** Whether the extractor covers the given type */ def covers(unapp: TermRef, scrutineeTp: Type, argLen: Int)(using Context): Boolean = SpaceEngine.isIrrefutable(unapp, argLen) - || unapp.symbol == defn.TypeTest_unapply && { + || unapp.symbol == defn.TypeTest_unapply `&&`: val AppliedType(_, _ :: tp :: Nil) = unapp.prefix.widen.dealias: @unchecked scrutineeTp <:< tp - } - || unapp.symbol == defn.ClassTagClass_unapply && { + || unapp.symbol == defn.ClassTagClass_unapply `&&`: val AppliedType(_, tp :: Nil) = unapp.prefix.widen.dealias: @unchecked scrutineeTp <:< tp - } /** Decompose a type into subspaces -- assume the type can be decomposed */ - def decompose(tp: Type)(using Context): List[Type] = trace(i"decompose($tp)", debug) { + def decompose(tp: Type)(using Context): List[Type] = trace(i"decompose($tp)", debug): def rec(tp: Type, mixins: List[Type]): List[Type] = tp.dealias match case AndType(tp1, tp2) => var tpB = tp2 @@ -676,7 +647,6 @@ object SpaceEngine { end rec 
rec(tp, Nil) - } extension (cls: Symbol) /** A type is decomposable to children if it's sealed, @@ -707,7 +677,7 @@ object SpaceEngine { * C --> C if current owner is C !!! * */ - def showType(tp: Type, showTypeArgs: Boolean = false)(using Context): String = { + def showType(tp: Type, showTypeArgs: Boolean = false)(using Context): String = val enclosingCls = ctx.owner.enclosingClass def isOmittable(sym: Symbol) = @@ -716,7 +686,7 @@ object SpaceEngine { sym.showFullName.startsWith("scala.") || sym == enclosingCls || sym == enclosingCls.sourceModule - def refinePrefix(tp: Type): String = tp match { + def refinePrefix(tp: Type): String = tp match case NoPrefix => "" case tp: NamedType if isOmittable(tp.symbol) => "" case tp: ThisType => refinePrefix(tp.tref) @@ -724,9 +694,8 @@ object SpaceEngine { case tp: NamedType => tp.name.show.stripSuffix("$") case tp: TypeVar => refinePrefix(tp.instanceOpt) case _ => tp.show - } - def refine(tp: Type): String = tp.stripped match { + def refine(tp: Type): String = tp.stripped match case tp: RefinedType => refine(tp.parent) case tp: AppliedType => refine(tp.typeConstructor) + ( @@ -742,57 +711,48 @@ object SpaceEngine { case tp: OrType => refine(tp.tp1) + " | " + refine(tp.tp2) case _: TypeBounds => "_" case _ => tp.show.stripSuffix("$") - } refine(tp) - } /** Whether the counterexample is satisfiable. The space is flattened and non-empty. 
*/ - def satisfiable(sp: Space)(using Context): Boolean = { + def satisfiable(sp: Space)(using Context): Boolean = def impossible: Nothing = throw new AssertionError("`satisfiable` only accepts flattened space.") - def genConstraint(space: Space): List[(Type, Type)] = space match { + def genConstraint(space: Space): List[(Type, Type)] = space match case Prod(tp, unappTp, ss) => val tps = signature(unappTp, tp, ss.length) - ss.zip(tps).flatMap { + ss.zip(tps).flatMap: case (sp : Prod, tp) => sp.tp -> tp :: genConstraint(sp) case (Typ(tp1, _), tp2) => tp1 -> tp2 :: Nil case _ => impossible - } case Typ(_, _) => Nil case _ => impossible - } - def checkConstraint(constrs: List[(Type, Type)])(using Context): Boolean = { + def checkConstraint(constrs: List[(Type, Type)])(using Context): Boolean = val tvarMap = collection.mutable.Map.empty[Symbol, TypeVar] - val typeParamMap = new TypeMap() { - override def apply(tp: Type): Type = tp match { + val typeParamMap = new TypeMap(): + override def apply(tp: Type): Type = tp match case tref: TypeRef if tref.symbol.is(TypeParam) => tvarMap.getOrElseUpdate(tref.symbol, newTypeVar(tref.underlying.bounds)) case tp => mapOver(tp) - } - } constrs.forall { case (tp1, tp2) => typeParamMap(tp1) <:< typeParamMap(tp2) } - } checkConstraint(genConstraint(sp))(using ctx.fresh.setNewTyperState()) - } def showSpaces(ss: Seq[Space])(using Context): String = ss.map(show).mkString(", ") /** Display spaces */ - def show(s: Space)(using Context): String = { + def show(s: Space)(using Context): String = def params(tp: Type): List[Type] = tp.classSymbol.primaryConstructor.info.firstParamTypes /** does the companion object of the given symbol have custom unapply */ - def hasCustomUnapply(sym: Symbol): Boolean = { + def hasCustomUnapply(sym: Symbol): Boolean = val companion = sym.companionModule companion.findMember(nme.unapply, NoPrefix, required = EmptyFlags, excluded = Synthetic).exists || companion.findMember(nme.unapplySeq, NoPrefix, required = 
EmptyFlags, excluded = Synthetic).exists - } - def doShow(s: Space, flattenList: Boolean = false): String = s match { + def doShow(s: Space, flattenList: Boolean = false): String = s match case Empty => "empty" case Typ(c: ConstantType, _) => "" + c.value.value case Typ(tp: TermRef, _) => @@ -821,20 +781,17 @@ object SpaceEngine { else if (tp.isRef(defn.ConsType.symbol)) if (flattenList) params.map(doShow(_, flattenList)).filter(_.nonEmpty).mkString(", ") else params.map(doShow(_, flattenList = true)).filter(!_.isEmpty).mkString("List(", ", ", ")") - else { + else val sym = fun.symbol val isUnapplySeq = sym.name.eq(nme.unapplySeq) val paramsStr = params.map(doShow(_, flattenList = isUnapplySeq)).mkString("(", ", ", ")") showType(fun.prefix) + paramsStr - } case Or(ss) => ss.map(doShow(_, flattenList)).mkString(" | ") - } doShow(s, flattenList = false) - } - private def exhaustivityCheckable(sel: Tree)(using Context): Boolean = { + private def exhaustivityCheckable(sel: Tree)(using Context): Boolean = val seen = collection.mutable.Set.empty[Type] // Possible to check everything, but be compatible with scalac by default @@ -849,27 +806,23 @@ object SpaceEngine { }) || tpw.isRef(defn.BooleanClass) || classSym.isAllOf(JavaEnumTrait) || - classSym.is(Case) && { + classSym.is(Case) `&&`: if seen.add(tpw) then productSelectorTypes(tpw, sel.srcPos).exists(isCheckable(_)) else true // recursive case class: return true and other members can still fail the check - } - val res = !sel.tpe.hasAnnotation(defn.UncheckedAnnot) && { + val res = !sel.tpe.hasAnnotation(defn.UncheckedAnnot) `&&`: ctx.settings.YcheckAllPatmat.value || isCheckable(sel.tpe) - } debug.println(s"exhaustivity checkable: ${sel.show} = $res") res - } /** Whether counter-examples should be further checked? True for GADTs. 
*/ private def shouldCheckExamples(tp: Type)(using Context): Boolean = new TypeAccumulator[Boolean] { - override def apply(b: Boolean, tp: Type): Boolean = tp match { + override def apply(b: Boolean, tp: Type): Boolean = tp match case tref: TypeRef if tref.symbol.is(TypeParam) && variance != 1 => true case tp => b || foldOver(b, tp) - } }.apply(false, tp) /** Return the underlying type of non-module, non-constant, non-enum case singleton types. @@ -885,7 +838,7 @@ object SpaceEngine { case _ => tp }) - def checkExhaustivity(m: Match)(using Context): Unit = if exhaustivityCheckable(m.selector) then trace(i"checkExhaustivity($m)", debug) { + def checkExhaustivity(m: Match)(using Context): Unit = if exhaustivityCheckable(m.selector) then trace(i"checkExhaustivity($m)", debug): val selTyp = toUnderlying(m.selector.tpe).dealias debug.println(i"selTyp = $selTyp") @@ -907,7 +860,6 @@ object SpaceEngine { val hasMore = uncovered.lengthCompare(6) > 0 val deduped = dedup(uncovered.take(6)) report.warning(PatternMatchExhaustivity(showSpaces(deduped), hasMore), m.selector) - } private def redundancyCheckable(sel: Tree)(using Context): Boolean = // Ignore Expr[T] and Type[T] for unreachability as a special case. 
@@ -919,7 +871,7 @@ object SpaceEngine { && !sel.tpe.widen.isRef(defn.QuotedExprClass) && !sel.tpe.widen.isRef(defn.QuotedTypeClass) - def checkRedundancy(m: Match)(using Context): Unit = if redundancyCheckable(m.selector) then trace(i"checkRedundancy($m)", debug) { + def checkRedundancy(m: Match)(using Context): Unit = if redundancyCheckable(m.selector) then trace(i"checkRedundancy($m)", debug): val cases = m.cases.toIndexedSeq val selTyp = toUnderlying(m.selector.tpe).dealias @@ -936,7 +888,7 @@ object SpaceEngine { var prevs = List.empty[Space] var deferred = List.empty[Tree] - while (i < len) { + while (i < len) val CaseDef(pat, guard, _) = cases(i) debug.println(i"case pattern: $pat") @@ -952,23 +904,18 @@ object SpaceEngine { if prev == Empty && covered == Empty then // defer until a case is reachable deferred ::= pat - else { + else for (pat <- deferred.reverseIterator) report.warning(MatchCaseUnreachable(), pat.srcPos) if pat != EmptyTree // rethrow case of catch uses EmptyTree && !pat.symbol.isAllOf(SyntheticCase, butNot=Method) // ExpandSAMs default cases use SyntheticCase && isSubspace(covered, prev) - then { + then val nullOnly = isNullable && i == len - 1 && isWildcardArg(pat) val msg = if nullOnly then MatchCaseOnlyNullWarning() else MatchCaseUnreachable() report.warning(msg, pat.srcPos) - } deferred = Nil - } // in redundancy check, take guard as false in order to soundly approximate prevs ::= (if guard.isEmpty then covered else Empty) i += 1 - } - } -} diff --git a/compiler/src/dotty/tools/dotc/transform/sjs/AddLocalJSFakeNews.scala b/compiler/src/dotty/tools/dotc/transform/sjs/AddLocalJSFakeNews.scala index 6471e58d4ddc..6ff9de469729 100644 --- a/compiler/src/dotty/tools/dotc/transform/sjs/AddLocalJSFakeNews.scala +++ b/compiler/src/dotty/tools/dotc/transform/sjs/AddLocalJSFakeNews.scala @@ -57,42 +57,36 @@ class AddLocalJSFakeNews extends MiniPhase { thisPhase => override def runsAfter: Set[String] = Set(Erasure.name) - override def 
transformApply(tree: Apply)(using Context): Tree = { - if (tree.symbol == jsdefn.Runtime_createLocalJSClass) { + override def transformApply(tree: Apply)(using Context): Tree = + if (tree.symbol == jsdefn.Runtime_createLocalJSClass) val classValueArg :: superClassValueArg :: _ :: Nil = tree.args: @unchecked - val cls = classValueArg match { + val cls = classValueArg match case Literal(constant) if constant.tag == Constants.ClazzTag => constant.typeValue.typeSymbol.asClass case _ => // this shouldn't happen report.error(em"unexpected $classValueArg for the first argument to `createLocalJSClass`", classValueArg) jsdefn.JSObjectClass - } - val fakeNews = { + val fakeNews = val ctors = cls.info.decls.lookupAll(nme.CONSTRUCTOR).toList.reverse val elems = ctors.map(ctor => fakeNew(cls, ctor.asTerm)) JavaSeqLiteral(elems, TypeTree(defn.ObjectType)) - } cpy.Apply(tree)(tree.fun, classValueArg :: superClassValueArg :: fakeNews :: Nil) - } else { + else tree - } - } /** Creates a fake invocation of the given class with the given constructor. 
*/ - private def fakeNew(cls: ClassSymbol, ctor: TermSymbol)(using Context): Tree = { + private def fakeNew(cls: ClassSymbol, ctor: TermSymbol)(using Context): Tree = val tycon = cls.typeRef val outerArgs = outer.argsForNew(cls, tycon) val nonOuterArgCount = ctor.info.firstParamTypes.size - outerArgs.size val nonOuterArgs = List.fill(nonOuterArgCount)(ref(defn.Predef_undefined).appliedToNone) New(tycon, ctor, outerArgs ::: nonOuterArgs) - } } -object AddLocalJSFakeNews { +object AddLocalJSFakeNews: val name: String = "addLocalJSFakeNews" val description: String = "adds fake new invocations to local JS classes in calls to `createLocalJSClass`" -} diff --git a/compiler/src/dotty/tools/dotc/transform/sjs/ExplicitJSClasses.scala b/compiler/src/dotty/tools/dotc/transform/sjs/ExplicitJSClasses.scala index 705b3cc404a8..6be9174f9be4 100644 --- a/compiler/src/dotty/tools/dotc/transform/sjs/ExplicitJSClasses.scala +++ b/compiler/src/dotty/tools/dotc/transform/sjs/ExplicitJSClasses.scala @@ -257,17 +257,15 @@ class ExplicitJSClasses extends MiniPhase with InfoTransformer { thisPhase => * * The latter is necessary for scala-js/scala-js#4086. */ - private def mayNeedJSClassOrJSObjectFields(sym: Symbol)(using Context): Boolean = { + private def mayNeedJSClassOrJSObjectFields(sym: Symbol)(using Context): Boolean = !sym.isStaticOwner || (sym.is(ModuleClass) && sym.hasAnnotation(jsdefn.JSTypeAnnot) && !sym.hasAnnotation(jsdefn.JSNativeAnnot)) - } /** Is the given symbol a JS class (that is not a trait nor an object)? */ - private def isJSClass(sym: Symbol)(using Context): Boolean = { + private def isJSClass(sym: Symbol)(using Context): Boolean = sym.isClass && !sym.isOneOf(Trait | Module) && sym.hasAnnotation(jsdefn.JSTypeAnnot) - } /** Is the given symbol a Module that should be exposed? 
*/ private def isExposedModule(sym: Symbol)(using Context): Boolean = @@ -290,12 +288,11 @@ class ExplicitJSClasses extends MiniPhase with InfoTransformer { thisPhase => isInnerJSClassOrObject(sym) || isLocalJSClassOrObject(sym) /** Is the given clazz an inner JS class or object? */ - private def isInnerJSClassOrObject(clazz: Symbol)(using Context): Boolean = { + private def isInnerJSClassOrObject(clazz: Symbol)(using Context): Boolean = clazz.hasAnnotation(jsdefn.JSTypeAnnot) && !clazz.isOneOf(PackageClass | Trait) && !clazz.isStatic && !clazz.isLocalToBlock - } /** Is the given clazz a local JS class or object? */ private def isLocalJSClassOrObject(clazz: Symbol)(using Context): Boolean = @@ -323,18 +320,16 @@ class ExplicitJSClasses extends MiniPhase with InfoTransformer { thisPhase => private def jsobjectGetterNameFor(moduleSym: Symbol)(using Context): TermName = jsobjectGetterName(moduleSym.name.asTermName) - private def makeJSNameAnnotation(argument: String)(using Context): Annotation = { + private def makeJSNameAnnotation(argument: String)(using Context): Annotation = val annotClass = jsdefn.JSNameAnnot val stringCtor = annotClass.info.decl(nme.CONSTRUCTOR).suchThat { ctor => - ctor.info match { + ctor.info match case mt: MethodType => mt.paramInfos.nonEmpty && mt.paramInfos.head.derivesFrom(defn.StringClass) case _ => false - } }.symbol.asTerm Annotation(New(annotClass.typeRef, stringCtor, Literal(Constant(argument)) :: Nil)) - } - override def transformInfo(tp: Type, sym: Symbol)(using Context): Type = tp match { + override def transformInfo(tp: Type, sym: Symbol)(using Context): Type = tp match case tp @ ClassInfo(_, cls, _, decls, _) if !cls.is(JavaDefined) && mayNeedJSClassOrJSObjectFields(cls) => val innerJSClasses = decls.filter(isJSClass) @@ -342,36 +337,32 @@ class ExplicitJSClasses extends MiniPhase with InfoTransformer { thisPhase => if (!cls.isStaticOwner) Nil // those already have a module accessor else decls.filter(isExposedModule) - if 
(innerJSClasses.isEmpty && innerObjectsForAdHocExposed.isEmpty) { + if (innerJSClasses.isEmpty && innerObjectsForAdHocExposed.isEmpty) tp - } else { - def addAnnots(sym: Symbol, symForName: Symbol): Unit = { - val jsNameAnnot = symForName.getAnnotation(jsdefn.JSNameAnnot).getOrElse { + else + def addAnnots(sym: Symbol, symForName: Symbol): Unit = + val jsNameAnnot = symForName.getAnnotation(jsdefn.JSNameAnnot).getOrElse: makeJSNameAnnotation(symForName.defaultJSName) - } sym.addAnnotation(jsNameAnnot) sym.addAnnotation(jsdefn.ExposedJSMemberAnnot) - } val clsIsJSClass = cls.hasAnnotation(jsdefn.JSTypeAnnot) val decls1 = decls.cloneScope - for (innerJSClass <- innerJSClasses) { - def addAnnotsIfInJSClass(sym: Symbol): Unit = { + for (innerJSClass <- innerJSClasses) + def addAnnotsIfInJSClass(sym: Symbol): Unit = if (clsIsJSClass) addAnnots(sym, innerJSClass) - } val fieldName = jsclassFieldName(innerJSClass.name.asTypeName) val fieldFlags = SyntheticArtifact val field = newSymbol(cls, fieldName, fieldFlags, defn.AnyRefType, coord = innerJSClass.coord) addAnnotsIfInJSClass(field) decls1.enter(field) - } // scala-js/scala-js#4086 Create exposed getters for exposed objects in static JS objects - for (innerObject <- innerObjectsForAdHocExposed) { + for (innerObject <- innerObjectsForAdHocExposed) assert(clsIsJSClass && cls.is(ModuleClass) && cls.isStatic, i"trying to ad-hoc expose objects in non-JS static object ${cls.fullName}") @@ -380,37 +371,31 @@ class ExplicitJSClasses extends MiniPhase with InfoTransformer { thisPhase => val getter = newSymbol(cls, getterName, getterFlags, ExprType(defn.AnyRefType), coord = innerObject.coord) addAnnots(getter, innerObject) decls1.enter(getter) - } tp.derivedClassInfo(decls = decls1) - } case _ => tp - } /** Adjust the `NoInits` flag of Scala traits containing a JS class and of JS traits. 
*/ - override def transform(ref: SingleDenotation)(using Context): SingleDenotation = { - super.transform(ref) match { + override def transform(ref: SingleDenotation)(using Context): SingleDenotation = + super.transform(ref) match case ref1: SymDenotation if ref1.is(Trait, butNot = JavaDefined) => val isJSType = ref1.hasAnnotation(jsdefn.JSTypeAnnot) - if (ref1.is(NoInits)) { + if (ref1.is(NoInits)) // If one of the decls is a JS class, there is now some initialization code to create the JS class if (!isJSType && ref1.info.decls.exists(isJSClass)) ref1.copySymDenotation(initFlags = ref1.flags &~ NoInits) else ref1 - } else { + else // JS traits never have an initializer, no matter what dotc thinks if (isJSType) ref1.copySymDenotation(initFlags = ref1.flags | NoInits) else ref1 - } case ref1 => ref1 - } - } override def infoMayChange(sym: Symbol)(using Context): Boolean = sym.isClass && !sym.is(JavaDefined) @@ -422,28 +407,23 @@ class ExplicitJSClasses extends MiniPhase with InfoTransformer { thisPhase => * a `Block` or `Template`, so that they are visible even before their * definition (in their enclosing scope). 
*/ - private def populateNestedObject2superClassTpe(stats: List[Tree])(using Context): Unit = { - for (stat <- stats) { - stat match { + private def populateNestedObject2superClassTpe(stats: List[Tree])(using Context): Unit = + for (stat <- stats) + stat match case cd @ TypeDef(_, rhs) if cd.isClassDef && isInnerOrLocalJSObject(cd.symbol) => myState.nestedObject2superTypeConstructor(cd.symbol) = extractSuperTypeConstructor(rhs) case _ => - } - } - } - override def prepareForBlock(tree: Block)(using Context): Context = { + override def prepareForBlock(tree: Block)(using Context): Context = populateNestedObject2superClassTpe(tree.stats) ctx - } - override def prepareForTemplate(tree: Template)(using Context): Context = { + override def prepareForTemplate(tree: Template)(using Context): Context = populateNestedObject2superClassTpe(tree.body) ctx - } // This method implements steps (A) and (B) - override def transformTemplate(tree: Template)(using Context): Tree = { + override def transformTemplate(tree: Template)(using Context): Tree = val cls = ctx.owner.asClass /* The `parents` of a Template have the same trees as `new` invocations @@ -457,30 +437,28 @@ class ExplicitJSClasses extends MiniPhase with InfoTransformer { thisPhase => if (!cls.isJSType) tree.parents // fast path else tree.parents.mapConserve(unwrapWithContextualJSClassValue(_)) - if (!mayNeedJSClassOrJSObjectFields(cls)) { + if (!mayNeedJSClassOrJSObjectFields(cls)) if (fixedParents eq tree.parents) tree else cpy.Template(tree)(parents = fixedParents) - } else { + else val newStats = List.newBuilder[Tree] - for (stat <- tree.body) { - stat match { + for (stat <- tree.body) + stat match case stat: TypeDef if stat.isClassDef && isJSClass(stat.symbol) => val innerClassSym = stat.symbol.asClass val jsclassAccessor = jsclassAccessorFor(innerClassSym) - val rhs = if (cls.hasAnnotation(jsdefn.JSNativeAnnot)) { + val rhs = if (cls.hasAnnotation(jsdefn.JSNativeAnnot)) ref(jsdefn.JSPackage_native) - } else { + 
else val clazzValue = clsOf(innerClassSym.typeRef) - if (cls.isStaticOwner) { + if (cls.isStaticOwner) // scala-js/scala-js#4086 ref(jsdefn.Runtime_constructorOf).appliedTo(clazzValue) - } else { + else val parentTpe = extractSuperTypeConstructor(stat.rhs) val superClassCtor = genJSConstructorOf(tree, parentTpe) ref(jsdefn.Runtime_createInnerJSClass).appliedTo(clazzValue, superClassCtor) - } - } newStats += ValDef(jsclassAccessor, rhs) @@ -492,44 +470,37 @@ class ExplicitJSClasses extends MiniPhase with InfoTransformer { thisPhase => case _ => () // nothing to do - } newStats += stat - } cpy.Template(tree)(tree.constr, fixedParents, Nil, tree.self, newStats.result()) - } - } // This method, together with transformTypeDef, implements step (C) - override def prepareForTypeDef(tree: TypeDef)(using Context): Context = { + override def prepareForTypeDef(tree: TypeDef)(using Context): Context = val sym = tree.symbol - if (sym.isClass && isLocalJSClass(sym)) { + if (sym.isClass && isLocalJSClass(sym)) val jsclassValName = LocalJSClassValueName.fresh(sym.name.toTermName) val jsclassVal = newSymbol(ctx.owner, jsclassValName, EmptyFlags, defn.AnyRefType, coord = tree.span) myState.localClass2jsclassVal(sym) = jsclassVal myState.notYetReferencedLocalClasses += sym - } ctx - } // This method, together with prepareForTypeDef, implements step (C) - override def transformTypeDef(tree: TypeDef)(using Context): Tree = { + override def transformTypeDef(tree: TypeDef)(using Context): Tree = val sym = tree.symbol - if (sym.isClass && isLocalJSClass(sym)) { + if (sym.isClass && isLocalJSClass(sym)) val cls = sym.asClass - val rhs = { + val rhs = val typeRef = tree.tpe val clazzValue = clsOf(typeRef) val superClassCtor = genJSConstructorOf(tree, extractSuperTypeConstructor(tree.rhs)) ref(jsdefn.Runtime_createLocalJSClass).appliedTo(clazzValue, superClassCtor, ref(defn.Predef_undefined)) - } val jsclassVal = myState.localClass2jsclassVal(sym) - if 
(myState.notYetReferencedLocalClasses.remove(cls)) { + if (myState.notYetReferencedLocalClasses.remove(cls)) Thicket(List(tree, ValDef(jsclassVal, rhs))) - } else { + else /* We are using `jsclassVal` inside the definition of the class. * We need to declare it as var before and initialize it after the class definition. */ @@ -539,62 +510,52 @@ class ExplicitJSClasses extends MiniPhase with InfoTransformer { thisPhase => tree, Assign(ref(jsclassVal), rhs) )) - } - } else { + else tree - } - } // This method, together with transformTypeApply and transformSelect, implements step (E) - override def transformApply(tree: Apply)(using Context): Tree = { - if (!isFullyApplied(tree)) { + override def transformApply(tree: Apply)(using Context): Tree = + if (!isFullyApplied(tree)) tree - } else { + else val sym = tree.symbol - if (sym.isConstructor) { + if (sym.isConstructor) /* Wrap `new`s to inner and local JS classes and objects with * `withContextualJSClassValue`, to preserve a reified reference to * the necessary JS class value (the class itself for classes, or the * super class for objects). 
*/ val cls = sym.owner - if (isInnerOrLocalJSClassOrObject(cls)) { - if (!isConsideredAnObject(cls)) { - methPart(tree) match { + if (isInnerOrLocalJSClassOrObject(cls)) + if (!isConsideredAnObject(cls)) + methPart(tree) match case Select(n @ New(tpt), _) => val jsclassValue = genJSConstructorOf(tpt, n.tpe) wrapWithContextualJSClassValue(jsclassValue)(tree) case _ => // Super constructor call or this()-constructor call tree - } - } else { + else wrapWithContextualJSClassValue(myState.nestedObject2superTypeConstructor(cls))(tree) - } - } else { + else tree - } - } else { + else maybeWrapSuperCallWithContextualJSClassValue(tree) - } - } - } // This method, together with transformApply and transformSelect, implements step (E) // It also implements step (D) and (F) - override def transformTypeApply(tree: TypeApply)(using Context): Tree = { - if (!isFullyApplied(tree)) { + override def transformTypeApply(tree: TypeApply)(using Context): Tree = + if (!isFullyApplied(tree)) tree - } else { + else val sym = tree.symbol - def isTypeTreeForInnerOrLocalJSClass(tpeArg: Tree): Boolean = { + def isTypeTreeForInnerOrLocalJSClass(tpeArg: Tree): Boolean = val tpeSym = tpeArg.tpe.typeSymbol tpeSym.exists && isInnerOrLocalJSClass(tpeSym) - } - tree match { + tree match // Desugar js.constructorOf[T] case TypeApply(fun, tpt :: Nil) if sym == jsdefn.JSPackage_constructorOf => genJSConstructorOf(tree, tpt.tpe).cast(jsdefn.JSDynamicType) @@ -607,18 +568,13 @@ class ExplicitJSClasses extends MiniPhase with InfoTransformer { thisPhase => case _ => maybeWrapSuperCallWithContextualJSClassValue(tree) - } - } - } // This method, together with transformApply and transformTypeApply, implements step (E) - override def transformSelect(tree: Select)(using Context): Tree = { - if (!isFullyApplied(tree)) { + override def transformSelect(tree: Select)(using Context): Tree = + if (!isFullyApplied(tree)) tree - } else { + else maybeWrapSuperCallWithContextualJSClassValue(tree) - } - } /** Tests whether 
this tree is fully applied, i.e., it does not need any * additional `TypeApply` or `Apply` to lead to a value. @@ -633,54 +589,48 @@ class ExplicitJSClasses extends MiniPhase with InfoTransformer { thisPhase => * `withContextualJSClassValue`, to preserve a reified reference to the * necessary JS class value (that of the super class). */ - private def maybeWrapSuperCallWithContextualJSClassValue(tree: Tree)(using Context): Tree = { - methPart(tree) match { + private def maybeWrapSuperCallWithContextualJSClassValue(tree: Tree)(using Context): Tree = + methPart(tree) match case Select(sup: Super, _) if isInnerOrLocalJSClass(sup.symbol.asClass.superClass) => wrapWithContextualJSClassValue(sup.symbol.asClass.superClass.typeRef)(tree) case _ => tree - } - } /** Generates the desugared version of `js.constructorOf[tpe]`. * * This is the meat of step (F). */ - private def genJSConstructorOf(tree: Tree, tpe0: Type)(using Context): Tree = { - val tpe = tpe0.underlyingClassRef(refinementOK = false) match { + private def genJSConstructorOf(tree: Tree, tpe0: Type)(using Context): Tree = + val tpe = tpe0.underlyingClassRef(refinementOK = false) match case typeRef: TypeRef => typeRef case _ => // This should not have passed the checks in PrepJSInterop report.error(em"class type required but found $tpe0", tree) jsdefn.JSObjectType - } val cls = tpe.typeSymbol // This should not have passed the checks in PrepJSInterop assert(!cls.isOneOf(Trait | ModuleClass), i"non-trait class type required but $tpe found for genJSConstructorOf at ${tree.sourcePos}") - if (isInnerJSClass(cls)) { + if (isInnerJSClass(cls)) // Use the $jsclass field in the outer instance val prefix: Type = tpe.prefix - if (prefix.isStable) { + if (prefix.isStable) val jsclassAccessor = jsclassAccessorFor(cls) ref(NamedType(prefix, jsclassAccessor.name, jsclassAccessor.denot)) - } else { + else report.error(em"stable reference to a JS class required but $tpe found", tree) ref(defn.Predef_undefined) - } - } else if 
(isLocalJSClass(cls)) { + else if (isLocalJSClass(cls)) // Use the local `val` that stores the JS class value val state = myState val jsclassVal = state.localClass2jsclassVal(cls) state.notYetReferencedLocalClasses -= cls ref(jsclassVal) - } else { + else // Defer translation to `LoadJSConstructor` to the back-end ref(jsdefn.Runtime_constructorOf).appliedTo(clsOf(tpe)) - } - } private def wrapWithContextualJSClassValue(jsClassType: Type)(tree: Tree)(using Context): Tree = wrapWithContextualJSClassValue(genJSConstructorOf(tree, jsClassType))(tree) @@ -688,13 +638,12 @@ class ExplicitJSClasses extends MiniPhase with InfoTransformer { thisPhase => private def wrapWithContextualJSClassValue(jsClassValue: Tree)(tree: Tree)(using Context): Tree = ref(jsdefn.Runtime_withContextualJSClassValue).appliedToType(tree.tpe).appliedTo(jsClassValue, tree) - private def unwrapWithContextualJSClassValue(tree: Tree)(using Context): Tree = tree match { + private def unwrapWithContextualJSClassValue(tree: Tree)(using Context): Tree = tree match case Apply(fun, jsClassValue :: actualTree :: Nil) if fun.symbol == jsdefn.Runtime_withContextualJSClassValue => actualTree case _ => tree - } /** Extracts the super type constructor of a `Template`, without type * parameters, so that the type is well-formed outside of the `Template`, @@ -715,15 +664,13 @@ class ExplicitJSClasses extends MiniPhase with InfoTransformer { thisPhase => typeDefRhs.asInstanceOf[Template].parents.head.tpe.dealias.typeConstructor } -object ExplicitJSClasses { +object ExplicitJSClasses: val name: String = "explicitJSClasses" val description: String = "make all JS classes explicit" val LocalJSClassValueName: UniqueNameKind = new UniqueNameKind("$jsclass") - private final class MyState { + private final class MyState: val nestedObject2superTypeConstructor = new MutableSymbolMap[Type] val localClass2jsclassVal = new MutableSymbolMap[TermSymbol] val notYetReferencedLocalClasses = new util.HashSet[Symbol] - } -} diff --git 
a/compiler/src/dotty/tools/dotc/transform/sjs/JSExportUtils.scala b/compiler/src/dotty/tools/dotc/transform/sjs/JSExportUtils.scala index 9abf9a919d6d..dd212752d016 100644 --- a/compiler/src/dotty/tools/dotc/transform/sjs/JSExportUtils.scala +++ b/compiler/src/dotty/tools/dotc/transform/sjs/JSExportUtils.scala @@ -10,23 +10,22 @@ import Names._ /** Utilities for JS exports handling. */ -object JSExportUtils { +object JSExportUtils: private inline val ExportPrefix = "$js$exported$" private inline val MethodExportPrefix = ExportPrefix + "meth$" private inline val PropExportPrefix = ExportPrefix + "prop$" /** Creates a name for an export specification. */ - def makeExportName(jsName: String, isProp: Boolean): TermName = { + def makeExportName(jsName: String, isProp: Boolean): TermName = val prefix = if (isProp) PropExportPrefix else MethodExportPrefix termName(prefix + jsName) - } /** Is this symbol an export forwarder? */ def isExportName(name: Name): Boolean = name.startsWith(ExportPrefix) && !name.is(DefaultGetterName) /** Retrieves the originally assigned jsName of this export and whether it is a property. 
*/ - def exportNameInfo(name: Name): (String, Boolean) = { + def exportNameInfo(name: Name): (String, Boolean) = val nameString = name.toString() if (nameString.startsWith(MethodExportPrefix)) (nameString.substring(MethodExportPrefix.length), false) @@ -34,5 +33,3 @@ object JSExportUtils { (nameString.substring(PropExportPrefix.length), true) else throw new IllegalArgumentException(s"non-exported name passed to jsExportInfo: $name") - } -} diff --git a/compiler/src/dotty/tools/dotc/transform/sjs/JSSymUtils.scala b/compiler/src/dotty/tools/dotc/transform/sjs/JSSymUtils.scala index 115d41dd3d46..c127b387c1ae 100644 --- a/compiler/src/dotty/tools/dotc/transform/sjs/JSSymUtils.scala +++ b/compiler/src/dotty/tools/dotc/transform/sjs/JSSymUtils.scala @@ -20,30 +20,28 @@ import dotty.tools.backend.sjs.JSDefinitions.jsdefn import org.scalajs.ir.{Trees => js} /** Additional extensions for `Symbol`s that are only relevant for Scala.js. */ -object JSSymUtils { +object JSSymUtils: /** The result type for `sym.jsName`. * * It is either a literal string, or a computed name represented by a reference * to a static `Symbol` (a `dotc.core.Symbols.Symbol`, not a `js.Symbol`). 
*/ - enum JSName { + enum JSName: case Literal(name: String) case Computed(sym: Symbol) - def displayName(using Context): String = this match { + def displayName(using Context): String = this match case Literal(name) => name case Computed(sym) => sym.fullName.toString() - } - } - enum JSCallingConvention { + enum JSCallingConvention: case Call, BracketAccess, BracketCall case Method(name: JSName) case Property(name: JSName) case UnaryOp(code: js.JSUnaryOp.Code) case BinaryOp(code: js.JSBinaryOp.Code) - def displayName(using Context): String = this match { + def displayName(using Context): String = this match case Call => "function application" case BracketAccess => "bracket access" case BracketCall => "bracket call" @@ -51,39 +49,31 @@ object JSSymUtils { case Property(name) => "property '" + name.displayName + "'" case UnaryOp(code) => "unary operator" case BinaryOp(code) => "binary operator" - } - } - object JSCallingConvention { - def of(sym: Symbol)(using Context): JSCallingConvention = { + object JSCallingConvention: + def of(sym: Symbol)(using Context): JSCallingConvention = assert(sym.isTerm, s"got non-term symbol: $sym") - if (isJSBracketAccess(sym)) { + if (isJSBracketAccess(sym)) BracketAccess - } else if (isJSBracketCall(sym)) { + else if (isJSBracketCall(sym)) BracketCall - } else { - def default = { + else + def default = val jsName = sym.jsName if (sym.isJSProperty) Property(jsName) else Method(jsName) - } - if (!sym.hasAnnotation(jsdefn.JSNameAnnot)) { + if (!sym.hasAnnotation(jsdefn.JSNameAnnot)) lazy val pc = sym.info.paramNamess.map(_.size).sum - sym.name match { + sym.name match case nme.apply => Call case JSUnaryOpMethodName(code) if pc == 0 => UnaryOp(code) case JSBinaryOpMethodName(code) if pc == 1 => BinaryOp(code) case _ => default - } - } else { + else default - } - } - } - } /** Info about a Scala method param when called as JS method. 
* @@ -98,12 +88,11 @@ object JSSymUtils { val info: Type, val repeated: Boolean = false, val capture: Boolean = false - ) { + ): override def toString(): String = s"ParamSpec($info, repeated = $repeated, capture = $capture)" - } - extension (sym: Symbol) { + extension (sym: Symbol) /** Is this symbol a JavaScript type? */ def isJSType(using Context): Boolean = sym.hasAnnotation(jsdefn.JSTypeAnnot) @@ -119,19 +108,16 @@ object JSSymUtils { /** Tests whether the given member is exposed, i.e., whether it was * originally a public or protected member of a non-native JS class. */ - def isJSExposed(using Context): Boolean = { - !sym.is(Bridge) && { + def isJSExposed(using Context): Boolean = + !sym.is(Bridge) `&&`: sym.hasAnnotation(jsdefn.ExposedJSMemberAnnot) || (sym.is(Accessor) && sym.field.hasAnnotation(jsdefn.ExposedJSMemberAnnot)) - } - } /** Should this symbol be translated into a JS getter? */ - def isJSGetter(using Context): Boolean = { + def isJSGetter(using Context): Boolean = sym.is(Module) || !sym.is(Method) || (sym.info.firstParamTypes.isEmpty && atPhaseNoLater(erasurePhase)(sym.info.isParameterless)) - } /** Should this symbol be translated into a JS setter? */ def isJSSetter(using Context): Boolean = @@ -160,44 +146,39 @@ object JSSymUtils { * If it is not explicitly specified with an `@JSName` annotation, the * JS name is inferred from the Scala name. 
*/ - def jsName(using Context): JSName = { + def jsName(using Context): JSName = sym.getAnnotation(jsdefn.JSNameAnnot).fold[JSName] { JSName.Literal(defaultJSName) } { annotation => - annotation.arguments.head match { + annotation.arguments.head match case Literal(Constant(name: String)) => JSName.Literal(name) case tree => JSName.Computed(tree.symbol) - } } - } def defaultJSName(using Context): String = if (sym.isTerm) sym.asTerm.name.unexpandedName.getterName.toString() else sym.name.unexpandedName.stripModuleClassSuffix.toString() - def jsParamInfos(using Context): List[JSParamInfo] = { + def jsParamInfos(using Context): List[JSParamInfo] = assert(sym.is(Method), s"trying to take JS param info of non-method: $sym") def paramNamesAndTypes(using Context): List[(Names.TermName, Type)] = sym.info.paramNamess.flatten.zip(sym.info.paramInfoss.flatten) - val paramInfosAtElimRepeated = atPhase(elimRepeatedPhase) { + val paramInfosAtElimRepeated = atPhase(elimRepeatedPhase): val list = - for ((name, info) <- paramNamesAndTypes) yield { + for ((name, info) <- paramNamesAndTypes) yield val v = if (info.isRepeatedParam) Some(info.repeatedToSingle.widenDealias) else None name -> v - } list.toMap - } - val paramInfosAtElimEVT = atPhase(elimErasedValueTypePhase) { + val paramInfosAtElimEVT = atPhase(elimErasedValueTypePhase): paramNamesAndTypes.toMap - } - for ((paramName, paramInfoNow) <- paramNamesAndTypes) yield { - paramInfosAtElimRepeated.get(paramName) match { + for ((paramName, paramInfoNow) <- paramNamesAndTypes) yield + paramInfosAtElimRepeated.get(paramName) match case None => // This is a capture parameter introduced by erasure or lambdalift new JSParamInfo(paramInfoNow, capture = true) @@ -208,9 +189,6 @@ object JSSymUtils { case Some(None) => val info = paramInfosAtElimEVT.getOrElse(paramName, paramInfoNow) new JSParamInfo(info) - } - } - } /** Tests whether the semantics of Scala.js require a field for this symbol, * irrespective of any optimization we think we 
can do. @@ -228,9 +206,8 @@ object JSSymUtils { || sym.hasAnnotation(jsdefn.JSExportStaticAnnot) ) end sjsNeedsField - } - private object JSUnaryOpMethodName { + private object JSUnaryOpMethodName: private val map = Map( nme.UNARY_+ -> js.JSUnaryOp.+, nme.UNARY_- -> js.JSUnaryOp.-, @@ -240,9 +217,8 @@ object JSSymUtils { def unapply(name: TermName): Option[js.JSUnaryOp.Code] = map.get(name) - } - private object JSBinaryOpMethodName { + private object JSBinaryOpMethodName: private val map = Map( nme.ADD -> js.JSBinaryOp.+, nme.SUB -> js.JSBinaryOp.-, @@ -268,5 +244,3 @@ object JSSymUtils { def unapply(name: TermName): Option[js.JSBinaryOp.Code] = map.get(name) - } -} diff --git a/compiler/src/dotty/tools/dotc/transform/sjs/JUnitBootstrappers.scala b/compiler/src/dotty/tools/dotc/transform/sjs/JUnitBootstrappers.scala index b911d7dfab96..9c8f7e50202a 100644 --- a/compiler/src/dotty/tools/dotc/transform/sjs/JUnitBootstrappers.scala +++ b/compiler/src/dotty/tools/dotc/transform/sjs/JUnitBootstrappers.scala @@ -105,7 +105,7 @@ import dotty.tools.backend.sjs.JSDefinitions.jsdefn * parameters, notably the expected exception class. This should be handled at * some point in the future. 
*/ -class JUnitBootstrappers extends MiniPhase { +class JUnitBootstrappers extends MiniPhase: import JUnitBootstrappers._ import ast.tpd._ @@ -118,31 +118,27 @@ class JUnitBootstrappers extends MiniPhase { // The actual transform ------------------------------- - override def transformPackageDef(tree: PackageDef)(using Context): Tree = { + override def transformPackageDef(tree: PackageDef)(using Context): Tree = val junitdefn = jsdefn.junit @tailrec - def hasTests(sym: ClassSymbol): Boolean = { + def hasTests(sym: ClassSymbol): Boolean = sym.info.decls.exists(m => m.is(Method) && m.hasAnnotation(junitdefn.TestAnnotClass)) || sym.superClass.exists && hasTests(sym.superClass.asClass) - } - def isTestClass(sym: Symbol): Boolean = { + def isTestClass(sym: Symbol): Boolean = sym.isClass && !sym.isOneOf(ModuleClass | Abstract | Trait) && hasTests(sym.asClass) - } - val bootstrappers = tree.stats.collect { + val bootstrappers = tree.stats.collect: case clDef: TypeDef if isTestClass(clDef.symbol) => genBootstrapper(clDef.symbol.asClass) - } if (bootstrappers.isEmpty) tree else cpy.PackageDef(tree)(tree.pid, tree.stats ::: bootstrappers) - } - private def genBootstrapper(testClass: ClassSymbol)(using Context): TypeDef = { + private def genBootstrapper(testClass: ClassSymbol)(using Context): TypeDef = val junitdefn = jsdefn.junit /* The name of the bootstrapper module. 
It is derived from the test class name by @@ -176,9 +172,8 @@ class JUnitBootstrappers extends MiniPhase { sbt.APIUtils.registerDummyClass(classSym) ClassDef(classSym, constr, defs) - } - private def genConstructor(owner: ClassSymbol)(using Context): DefDef = { + private def genConstructor(owner: ClassSymbol)(using Context): DefDef = val sym = newDefaultConstructor(owner).entered DefDef(sym, { Block( @@ -186,24 +181,21 @@ class JUnitBootstrappers extends MiniPhase { unitLiteral ) }) - } - private def genCallOnModule(owner: ClassSymbol, name: TermName, module: Symbol, annot: Symbol)(using Context): DefDef = { + private def genCallOnModule(owner: ClassSymbol, name: TermName, module: Symbol, annot: Symbol)(using Context): DefDef = val sym = newSymbol(owner, name, Synthetic | Method, MethodType(Nil, Nil, defn.UnitType)).entered DefDef(sym, { - if (module.exists) { + if (module.exists) val calls = annotatedMethods(module.moduleClass.asClass, annot) .map(m => Apply(ref(module).select(m), Nil)) Block(calls, unitLiteral) - } else { + else unitLiteral - } }) - } - private def genCallOnParam(owner: ClassSymbol, name: TermName, testClass: ClassSymbol, annot: Symbol)(using Context): DefDef = { + private def genCallOnParam(owner: ClassSymbol, name: TermName, testClass: ClassSymbol, annot: Symbol)(using Context): DefDef = val sym = newSymbol(owner, name, Synthetic | Method, MethodType(junitNme.instance :: Nil, defn.ObjectType :: Nil, defn.UnitType)).entered @@ -213,21 +205,20 @@ class JUnitBootstrappers extends MiniPhase { .map(m => Apply(instanceParamRef.cast(testClass.typeRef).select(m), Nil)) Block(calls, unitLiteral) }) - } - private def genTests(owner: ClassSymbol, tests: List[Symbol])(using Context): DefDef = { + private def genTests(owner: ClassSymbol, tests: List[Symbol])(using Context): DefDef = val junitdefn = jsdefn.junit val sym = newSymbol(owner, junitNme.tests, Synthetic | Method, MethodType(Nil, defn.ArrayOf(junitdefn.TestMetadataType))).entered DefDef(sym, { - 
val metadata = for (test <- tests) yield { + val metadata = for (test <- tests) yield val name = Literal(Constant(test.name.mangledString)) val ignored = Literal(Constant(test.hasAnnotation(junitdefn.IgnoreAnnotClass))) val testAnnot = test.getAnnotation(junitdefn.TestAnnotClass).get - val mappedArguments = testAnnot.arguments.flatMap{ + val mappedArguments = testAnnot.arguments.flatMap: // Since classOf[...] in annotations would not be transformed, grab the resulting class constant here case NamedArg(expectedName: SimpleName, TypeApply(Ident(nme.classOf), fstArg :: _)) if expectedName.toString == "expected" => Some(clsOf(fstArg.tpe)) @@ -235,23 +226,19 @@ class JUnitBootstrappers extends MiniPhase { case NamedArg(timeoutName: TermName, timeoutLiteral: Literal) if timeoutName.toString == "timeout" => Some(timeoutLiteral) case other => { - val shownName = other match { - case NamedArg(name, _) => name.show(using ctx) - case other => other.show(using ctx) + val shownName = other match + case NamedArg(name, _) => name.show(using ctx) + case other => other.show(using ctx) + report.error(em"$shownName is an unsupported argument for the JUnit @Test annotation in this position", other.sourcePos) + None } - report.error(em"$shownName is an unsupported argument for the JUnit @Test annotation in this position", other.sourcePos) - None - } - } val reifiedAnnot = resolveConstructor(junitdefn.TestAnnotType, mappedArguments) New(junitdefn.TestMetadataType, List(name, ignored, reifiedAnnot)) - } JavaSeqLiteral(metadata, TypeTree(junitdefn.TestMetadataType)) }) - } - private def genInvokeTest(owner: ClassSymbol, testClass: ClassSymbol, tests: List[Symbol])(using Context): DefDef = { + private def genInvokeTest(owner: ClassSymbol, testClass: ClassSymbol, tests: List[Symbol])(using Context): DefDef = val junitdefn = jsdefn.junit val sym = newSymbol(owner, junitNme.invokeTest, Synthetic | Method, @@ -272,33 +259,29 @@ class JUnitBootstrappers extends MiniPhase { } ) }) - } - private 
def genTestInvocation(testClass: ClassSymbol, testMethod: Symbol, instance: Tree)(using Context): Tree = { + private def genTestInvocation(testClass: ClassSymbol, testMethod: Symbol, instance: Tree)(using Context): Tree = val junitdefn = jsdefn.junit val resultType = testMethod.info.resultType - if (resultType.isRef(defn.UnitClass)) { + if (resultType.isRef(defn.UnitClass)) val newSuccess = ref(junitdefn.SuccessModule_apply).appliedTo(ref(defn.BoxedUnit_UNIT)) Block( instance.select(testMethod).appliedToNone :: Nil, ref(junitdefn.FutureModule_successful).appliedTo(newSuccess) ) - } else if (resultType.isRef(junitdefn.FutureClass)) { + else if (resultType.isRef(junitdefn.FutureClass)) instance.select(testMethod).appliedToNone - } else { + else // We lie in the error message to not expose that we support async testing. report.error("JUnit test must have Unit return type", testMethod.sourcePos) EmptyTree - } - } - private def genNewInstance(owner: ClassSymbol, testClass: ClassSymbol)(using Context): DefDef = { + private def genNewInstance(owner: ClassSymbol, testClass: ClassSymbol)(using Context): DefDef = val sym = newSymbol(owner, junitNme.newInstance, Synthetic | Method, MethodType(Nil, defn.ObjectType)).entered DefDef(sym, New(testClass.typeRef, Nil)) - } private def castParam(param: Symbol, clazz: Symbol)(using Context): Tree = ref(param).cast(clazz.typeRef) @@ -309,13 +292,12 @@ class JUnitBootstrappers extends MiniPhase { .filter(_.symbol.hasAnnotation(annot)) .map(_.symbol) .toList -} -object JUnitBootstrappers { +object JUnitBootstrappers: val name: String = "junitBootstrappers" val description: String = "generate JUnit-specific bootstrapper classes for Scala.js" - private object junitNme { + private object junitNme: val beforeClass: TermName = termName("beforeClass") val afterClass: TermName = termName("afterClass") val before: TermName = termName("before") @@ -327,6 +309,4 @@ object JUnitBootstrappers { val instance: TermName = termName("instance") val 
name: TermName = termName("name") val castInstance: TermName = termName("castInstance") - } -} diff --git a/compiler/src/dotty/tools/dotc/transform/sjs/PrepJSExports.scala b/compiler/src/dotty/tools/dotc/transform/sjs/PrepJSExports.scala index 25ab46712e70..7b182caa49de 100644 --- a/compiler/src/dotty/tools/dotc/transform/sjs/PrepJSExports.scala +++ b/compiler/src/dotty/tools/dotc/transform/sjs/PrepJSExports.scala @@ -24,13 +24,13 @@ import JSSymUtils._ import org.scalajs.ir.Names.DefaultModuleID import org.scalajs.ir.Trees.TopLevelExportDef.isValidTopLevelExportName -object PrepJSExports { +object PrepJSExports: import tpd._ import PrepJSInterop.{checkSetterSignature, isJSAny, isPrivateMaybeWithin} private sealed abstract class ExportDestination - private object ExportDestination { + private object ExportDestination: /** Export in the "normal" way: as an instance member, or at the top-level * for naturally top-level things (classes and modules). */ @@ -41,7 +41,6 @@ object PrepJSExports { /** Export as a static member of the companion class. */ case object Static extends ExportDestination - } private final case class ExportInfo(jsName: String, destination: ExportDestination)(val pos: SrcPos) @@ -50,54 +49,50 @@ object PrepJSExports { * Note that non-module Scala classes are never actually exported; their constructors are. * However, the checks are performed on the class when the class is annotated. */ - def checkClassOrModuleExports(sym: Symbol)(using Context): Unit = { + def checkClassOrModuleExports(sym: Symbol)(using Context): Unit = val exports = exportsOf(sym) if (exports.nonEmpty) checkClassOrModuleExports(sym, exports.head.pos) - } /** Generate the exporter for the given DefDef or ValDef. * * If this DefDef is a constructor, it is registered to be exported by * GenJSCode instead and no trees are returned. 
*/ - def genExportMember(baseSym: Symbol)(using Context): List[Tree] = { + def genExportMember(baseSym: Symbol)(using Context): List[Tree] = val clsSym = baseSym.owner val exports = exportsOf(baseSym) // Helper function for errors - def err(msg: String): List[Tree] = { + def err(msg: String): List[Tree] = report.error(msg, exports.head.pos) Nil - } def memType = if (baseSym.isConstructor) "constructor" else "method" - if (exports.isEmpty) { + if (exports.isEmpty) Nil - } else if (!hasLegalExportVisibility(baseSym)) { + else if (!hasLegalExportVisibility(baseSym)) err(s"You may only export public and protected ${memType}s") - } else if (baseSym.is(Inline)) { + else if (baseSym.is(Inline)) err("You may not export an inline method") - } else if (isJSAny(clsSym)) { + else if (isJSAny(clsSym)) err(s"You may not export a $memType of a subclass of js.Any") - } else if (baseSym.isLocalToBlock) { + else if (baseSym.isLocalToBlock) err("You may not export a local definition") - } else if (hasIllegalRepeatedParam(baseSym)) { + else if (hasIllegalRepeatedParam(baseSym)) err(s"In an exported $memType, a *-parameter must come last (through all parameter lists)") - } else if (hasIllegalDefaultParam(baseSym)) { + else if (hasIllegalDefaultParam(baseSym)) err(s"In an exported $memType, all parameters with defaults must be at the end") - } else if (baseSym.isConstructor) { + else if (baseSym.isConstructor) // Constructors do not need an exporter method. We only perform the checks at this phase. checkClassOrModuleExports(clsSym, exports.head.pos) Nil - } else { + else assert(!baseSym.is(Bridge), s"genExportMember called for bridge symbol $baseSym") val normalExports = exports.filter(_.destination == ExportDestination.Normal) normalExports.flatMap(exp => genExportDefs(baseSym, exp.jsName, exp.pos.span)) - } - } /** Check a class or module for export. 
* @@ -105,7 +100,7 @@ object PrepJSExports { * - via `registerClassExports` * - via `genExportMember` (constructor of Scala class) */ - private def checkClassOrModuleExports(sym: Symbol, errPos: SrcPos)(using Context): Unit = { + private def checkClassOrModuleExports(sym: Symbol, errPos: SrcPos)(using Context): Unit = val isMod = sym.is(ModuleClass) def err(msg: String): Unit = @@ -114,43 +109,41 @@ object PrepJSExports { def hasAnyNonPrivateCtor: Boolean = sym.info.decl(nme.CONSTRUCTOR).hasAltWith(denot => !isPrivateMaybeWithin(denot.symbol)) - if (sym.is(Trait)) { + if (sym.is(Trait)) err("You may not export a trait") - } else if (sym.hasAnnotation(jsdefn.JSNativeAnnot)) { + else if (sym.hasAnnotation(jsdefn.JSNativeAnnot)) err("You may not export a native JS " + (if (isMod) "object" else "class")) - } else if (!hasLegalExportVisibility(sym)) { + else if (!hasLegalExportVisibility(sym)) err("You may only export public and protected " + (if (isMod) "objects" else "classes")) - } else if (isJSAny(sym.owner)) { + else if (isJSAny(sym.owner)) err("You may not export a " + (if (isMod) "object" else "class") + " in a subclass of js.Any") - } else if (sym.isLocalToBlock) { + else if (sym.isLocalToBlock) err("You may not export a local " + (if (isMod) "object" else "class")) - } else if (!sym.isStatic) { + else if (!sym.isStatic) if (isMod) err("You may not export a nested object") else err("You may not export a nested class. Create an exported factory method in the outer class to work around this limitation.") - } else if (sym.is(Abstract, butNot = Trait) && !isJSAny(sym)) { + else if (sym.is(Abstract, butNot = Trait) && !isJSAny(sym)) err("You may not export an abstract class") - } else if (!isMod && !hasAnyNonPrivateCtor) { + else if (!isMod && !hasAnyNonPrivateCtor) /* This test is only relevant for JS classes but doesn't hurt for Scala * classes as we could not reach it if there were only private * constructors. 
*/ err("You may not export a class that has only private constructors") - } else { + else { // OK } - } /** Computes the ExportInfos for sym from its annotations. */ - private def exportsOf(sym: Symbol)(using Context): List[ExportInfo] = { - val trgSym = { + private def exportsOf(sym: Symbol)(using Context): List[ExportInfo] = + val trgSym = def isOwnerScalaClass = !sym.owner.is(ModuleClass) && !isJSAny(sym.owner) // For primary Scala class constructors, look on the class itself if (sym.isPrimaryConstructor && isOwnerScalaClass) sym.owner else sym - } val JSExportAnnot = jsdefn.JSExportAnnot val JSExportTopLevelAnnot = jsdefn.JSExportTopLevelAnnot @@ -165,16 +158,15 @@ object PrepJSExports { val isMember = !sym.isClass && !sym.isConstructor // Annotations for this member on the whole unit - val unitAnnots = { + val unitAnnots = if (isMember && sym.isPublic && !sym.is(Synthetic)) sym.owner.annotations.filter(_.symbol == JSExportAllAnnot) else Nil - } val allExportInfos = for { annot <- directAnnots ++ unitAnnots - } yield { + } yield val isExportAll = annot.symbol == JSExportAllAnnot val isTopLevelExport = annot.symbol == JSExportTopLevelAnnot val isStaticExport = annot.symbol == JSExportStaticAnnot @@ -185,37 +177,30 @@ object PrepJSExports { assert(!isTopLevelExport || hasExplicitName, em"Found a top-level export without an explicit name at ${exportPos.sourcePos}") - val name = { - if (hasExplicitName) { - annot.argumentConstantString(0).getOrElse { + val name = + if (hasExplicitName) + annot.argumentConstantString(0).getOrElse: report.error( em"The argument to ${annot.symbol.name} must be a literal string", annot.arguments(0)) "dummy" - } - } else { + else sym.defaultJSName - } - } - val destination = { - if (isTopLevelExport) { - val moduleID = if (annot.arguments.size == 1) { + val destination = + if (isTopLevelExport) + val moduleID = if (annot.arguments.size == 1) DefaultModuleID - } else { - annot.argumentConstantString(1).getOrElse { + else + 
annot.argumentConstantString(1).getOrElse: report.error("moduleID must be a literal string", annot.arguments(1)) DefaultModuleID - } - } ExportDestination.TopLevel(moduleID) - } else if (isStaticExport) { + else if (isStaticExport) ExportDestination.Static - } else { + else ExportDestination.Normal - } - } // Enforce proper setter signature if (sym.isJSSetter) @@ -228,24 +213,22 @@ object PrepJSExports { /* Illegal function application exports, i.e., method named 'apply' * without an explicit export name. */ - if (isMember && !hasExplicitName && sym.name == nme.apply) { - destination match { + if (isMember && !hasExplicitName && sym.name == nme.apply) + destination match case ExportDestination.Normal => - def shouldBeTolerated = { + def shouldBeTolerated = isExportAll && directAnnots.exists { annot => annot.symbol == JSExportAnnot && annot.arguments.nonEmpty && annot.argumentConstantString(0).contains("apply") } - } // Don't allow apply without explicit name - if (!shouldBeTolerated) { + if (!shouldBeTolerated) report.error( "A member cannot be exported to function application. " + "Add @JSExport(\"apply\") to export under the name apply.", exportPos) - } case _: ExportDestination.TopLevel => throw new AssertionError( @@ -256,35 +239,30 @@ object PrepJSExports { "A member cannot be exported to function application as static. 
" + "Use @JSExportStatic(\"apply\") to export it under the name 'apply'.", exportPos) - } - } val symOwner = if (sym.isConstructor) sym.owner.owner else sym.owner // Destination-specific restrictions - destination match { + destination match case ExportDestination.Normal => // Make sure we do not override the default export of toString - def isIllegalToString = { + def isIllegalToString = isMember && name == "toString" && sym.name != nme.toString_ && sym.info.paramInfoss.forall(_.isEmpty) && !sym.isJSGetter - } - if (isIllegalToString) { + if (isIllegalToString) report.error( "You may not export a zero-argument method named other than 'toString' under the name 'toString'", exportPos) - } // Disallow @JSExport at the top-level, as well as on objects and classes - if (symOwner.is(Package) || symOwner.isPackageObject) { + if (symOwner.is(Package) || symOwner.isPackageObject) report.error("@JSExport is forbidden on top-level definitions. Use @JSExportTopLevel instead.", exportPos) - } else if (!isMember && !sym.is(Trait)) { + else if (!isMember && !sym.is(Trait)) report.error( "@JSExport is forbidden on objects and classes. 
Use @JSExport'ed factory methods instead.", exportPos) - } case _: ExportDestination.TopLevel => if (sym.is(Lazy)) @@ -303,33 +281,28 @@ object PrepJSExports { report.error("The top-level export name must be a valid JavaScript identifier name", exportPos) case ExportDestination.Static => - def companionIsNonNativeJSClass: Boolean = { + def companionIsNonNativeJSClass: Boolean = val companion = symOwner.companionClass companion != NoSymbol && !companion.is(Trait) && isJSAny(companion) && !companion.hasAnnotation(jsdefn.JSNativeAnnot) - } - if (!symOwner.isStatic || !symOwner.is(ModuleClass) || !companionIsNonNativeJSClass) { + if (!symOwner.isStatic || !symOwner.is(ModuleClass) || !companionIsNonNativeJSClass) report.error( "Only a static object whose companion class is a non-native JS class may export its members as static.", exportPos) - } - if (isMember) { + if (isMember) if (sym.is(Lazy)) report.error("You may not export a lazy val as static", exportPos) - } else { + else if (sym.is(Trait)) report.error("You may not export a trait as static.", exportPos) else report.error("Implementation restriction: cannot export a class or object as static", exportPos) - } - } ExportInfo(name, destination)(exportPos) - } allExportInfos.filter(_.destination == ExportDestination.Normal) .groupBy(_.jsName) @@ -346,32 +319,26 @@ object PrepJSExports { * static and as top-level (it is possible to export a field several times * as top-level, though). 
*/ - if (!sym.is(Method)) { - for (firstStatic <- allExportInfos.find(_.destination == ExportDestination.Static)) { - for (duplicate <- allExportInfos) { - duplicate.destination match { + if (!sym.is(Method)) + for (firstStatic <- allExportInfos.find(_.destination == ExportDestination.Static)) + for (duplicate <- allExportInfos) + duplicate.destination match case ExportDestination.Normal => // OK case ExportDestination.Static => - if (duplicate ne firstStatic) { + if (duplicate ne firstStatic) report.error( "Fields (val or var) cannot be exported as static more than once", duplicate.pos) - } case _: ExportDestination.TopLevel => report.error( "Fields (val or var) cannot be exported both as static and at the top-level", duplicate.pos) - } - } - } - } allExportInfos.distinct - } /** Generates an exporter for a DefDef including default parameter methods. */ - private def genExportDefs(defSym: Symbol, jsName: String, span: Span)(using Context): List[Tree] = { + private def genExportDefs(defSym: Symbol, jsName: String, span: Span)(using Context): List[Tree] = val clsSym = defSym.owner.asClass // Create symbol for new method @@ -388,22 +355,19 @@ object PrepJSExports { val exporter = genProxyDefDef(clsSym, defSym, expSym, span) // Construct exporters for default getters - val defaultGetters = if (!defSym.hasDefaultParams) { + val defaultGetters = if (!defSym.hasDefaultParams) Nil - } else { + else for { (param, i) <- defSym.paramSymss.flatten.zipWithIndex if param.is(HasDefault) - } yield { + } yield genExportDefaultGetter(clsSym, defSym, expSym, i, span) - } - } exporter :: defaultGetters - } private def genExportDefaultGetter(clsSym: ClassSymbol, trgMethod: Symbol, - exporter: Symbol, paramPos: Int, span: Span)(using Context): Tree = { + exporter: Symbol, paramPos: Int, span: Span)(using Context): Tree = // Get default getter method we'll copy val trgGetterDenot = defaultGetterDenot(clsSym, trgMethod, paramPos) @@ -412,31 +376,28 @@ object PrepJSExports { // Although 
the following must be true in a correct program, we cannot // assert, since a graceful failure message is only generated later - if (!trgGetterDenot.isOverloaded) { + if (!trgGetterDenot.isOverloaded) val trgGetter = trgGetterDenot.symbol val expGetterName = DefaultGetterName(exporter.name.asTermName, paramPos) val expGetter = newSymbol(clsSym, expGetterName, trgGetter.flags, trgGetter.info, trgGetter.privateWithin, coord = span).entered genProxyDefDef(clsSym, trgGetter, expGetter, span) - } else { + else EmptyTree - } - } private def defaultGetterDenot(targetSym: Symbol, sym: Symbol, paramIndex: Int)(using Context): Denotation = targetSym.info.member(DefaultGetterName(sym.name.asTermName, paramIndex)) /** generate a DefDef tree (from [[proxySym]]) that calls [[trgSym]] */ private def genProxyDefDef(clsSym: ClassSymbol, trgSym: Symbol, - proxySym: TermSymbol, span: Span)(using Context): Tree = { + proxySym: TermSymbol, span: Span)(using Context): Tree = DefDef(proxySym, { argss => This(clsSym).select(trgSym).appliedToArgss(argss) }).withSpan(span) - } /** Changes the final result type of a type `tpe` to Any. */ - private def finalResultTypeToAny(tpe: Type)(using Context): Type = tpe match { + private def finalResultTypeToAny(tpe: Type)(using Context): Type = tpe match case tpe: MethodType => MethodType(tpe.paramNames, tpe.paramInfos, finalResultTypeToAny(tpe.resultType)) case _: ExprType => @@ -447,21 +408,17 @@ object PrepJSExports { x => finalResultTypeToAny(tpe.resultType.subst(tpe, x))) case _ => defn.AnyType - } /** Whether the given symbol has a visibility that allows exporting */ private def hasLegalExportVisibility(sym: Symbol)(using Context): Boolean = sym.isPublic || sym.is(Protected, butNot = Local) /** Checks whether this type has a repeated parameter elsewhere than at the end of all the params. 
*/ - private def hasIllegalRepeatedParam(sym: Symbol)(using Context): Boolean = { + private def hasIllegalRepeatedParam(sym: Symbol)(using Context): Boolean = val paramInfos = sym.info.paramInfoss.flatten paramInfos.nonEmpty && paramInfos.init.exists(_.isRepeatedParam) - } /** Checks whether there are default parameters not at the end of the flattened parameter list. */ - private def hasIllegalDefaultParam(sym: Symbol)(using Context): Boolean = { + private def hasIllegalDefaultParam(sym: Symbol)(using Context): Boolean = sym.hasDefaultParams && sym.paramSymss.flatten.reverse.dropWhile(_.is(HasDefault)).exists(_.is(HasDefault)) - } -} diff --git a/compiler/src/dotty/tools/dotc/transform/sjs/PrepJSInterop.scala b/compiler/src/dotty/tools/dotc/transform/sjs/PrepJSInterop.scala index a2f9a0fb45a3..9888ef36f4bc 100644 --- a/compiler/src/dotty/tools/dotc/transform/sjs/PrepJSInterop.scala +++ b/compiler/src/dotty/tools/dotc/transform/sjs/PrepJSInterop.scala @@ -67,7 +67,7 @@ class PrepJSInterop extends MacroTransform with IdentityDenotTransformer { thisP protected def newTransformer(using Context): Transformer = new ScalaJSPrepJSInteropTransformer - class ScalaJSPrepJSInteropTransformer extends Transformer with Checking { + class ScalaJSPrepJSInteropTransformer extends Transformer with Checking: import PrepJSExports._ /** Kind of the directly enclosing (most nested) owner. */ @@ -79,30 +79,27 @@ class PrepJSInterop extends MacroTransform with IdentityDenotTransformer { thisP /** Nicer syntax for `allEnclosingOwners is kind`. 
*/ private def anyEnclosingOwner: OwnerKind = allEnclosingOwners - private def enterOwner[A](kind: OwnerKind)(body: => A): A = { + private def enterOwner[A](kind: OwnerKind)(body: => A): A = require(kind.isBaseKind, kind) val oldEnclosingOwner = enclosingOwner val oldAllEnclosingOwners = allEnclosingOwners enclosingOwner = kind allEnclosingOwners |= kind - try { + try body - } finally { + finally enclosingOwner = oldEnclosingOwner allEnclosingOwners = oldAllEnclosingOwners - } - } private var dynamicImportEnclosingClasses: Set[Symbol] = Set.empty - private def enterDynamicImportEnclosingClass[A](cls: Symbol)(body: => A): A = { + private def enterDynamicImportEnclosingClass[A](cls: Symbol)(body: => A): A = val saved = dynamicImportEnclosingClasses dynamicImportEnclosingClasses = saved + cls try body finally dynamicImportEnclosingClasses = saved - } private def hasImplicitThisPrefixToDynamicImportEnclosingClass(tpe: Type)(using Context): Boolean = tpe match @@ -114,8 +111,8 @@ class PrepJSInterop extends MacroTransform with IdentityDenotTransformer { thisP /** DefDefs in class templates that export methods to JavaScript */ private val exporters = mutable.Map.empty[Symbol, mutable.ListBuffer[Tree]] - override def transform(tree: Tree)(using Context): Tree = { - tree match { + override def transform(tree: Tree)(using Context): Tree = + tree match case tree: ValDef if tree.symbol.is(Module) => /* Never apply this transformation on the term definition of modules. * Instead, all relevant checks are performed on the module class definition. 
@@ -128,10 +125,8 @@ class PrepJSInterop extends MacroTransform with IdentityDenotTransformer { thisP case tree: MemberDef => transformMemberDef(tree) case tree: Template => transformTemplate(tree) case _ => transformStatOrExpr(tree) - } - } - private def transformMemberDef(tree: MemberDef)(using Context): Tree = { + private def transformMemberDef(tree: MemberDef)(using Context): Tree = val sym = tree.symbol checkInternalAnnotations(sym) @@ -144,21 +139,20 @@ class PrepJSInterop extends MacroTransform with IdentityDenotTransformer { thisP * - if not @js.native, verify that we do not use any other annotation * reserved for @js.native members (namely, JS native load spec annots) */ - val isJSNative = sym.getAnnotation(jsdefn.JSNativeAnnot) match { + val isJSNative = sym.getAnnotation(jsdefn.JSNativeAnnot) match case Some(annot) => checkJSNativeDefinition(tree, annot.tree, sym) true case None => checkJSNativeSpecificAnnotsOnNonJSNative(tree) false - } checkJSNameAnnots(sym) constFoldJSExportTopLevelAndStaticAnnotations(sym) markExposedIfRequired(tree.symbol) - tree match { + tree match case tree: TypeDef if tree.isClassDef => checkClassOrModuleExports(sym) @@ -182,66 +176,55 @@ class PrepJSInterop extends MacroTransform with IdentityDenotTransformer { thisP transformValOrDefDefInJSType(tree) else transformScalaValOrDefDef(tree) - } - } - private def transformScalaClassDef(tree: TypeDef)(using Context): Tree = { + private def transformScalaClassDef(tree: TypeDef)(using Context): Tree = val sym = tree.symbol // In native JS things, only js.Any stuff is allowed - if (enclosingOwner is OwnerKind.JSNative) { + if (enclosingOwner is OwnerKind.JSNative) /* We have to allow synthetic companion objects here, as they get * generated when a nested native JS class has default arguments in * its constructor (see #1891). 
*/ - if (!sym.is(Synthetic)) { + if (!sym.is(Synthetic)) report.error( "Native JS traits, classes and objects cannot contain inner Scala traits, classes or objects (i.e., not extending js.Any)", tree) - } - } if (sym == jsdefn.PseudoUnionClass) sym.addAnnotation(jsdefn.JSTypeAnnot) - val kind = if (sym.isSubClass(jsdefn.scalaEnumeration.EnumerationClass)) { + val kind = if (sym.isSubClass(jsdefn.scalaEnumeration.EnumerationClass)) if (sym.is(Module)) OwnerKind.EnumMod else if (sym == jsdefn.scalaEnumeration.EnumerationClass) OwnerKind.EnumImpl else OwnerKind.EnumClass - } else { + else if (sym.is(Module)) OwnerKind.NonEnumScalaMod else OwnerKind.NonEnumScalaClass - } - enterOwner(kind) { + enterOwner(kind): super.transform(tree) - } - } - private def transformTemplate(tree: Template)(using Context): Template = { + private def transformTemplate(tree: Template)(using Context): Template = // First, recursively transform the template val transformedTree = super.transform(tree).asInstanceOf[Template] val clsSym = ctx.owner // Check that @JSExportStatic fields come first - if (clsSym.is(ModuleClass)) { // quick check to avoid useless work + if (clsSym.is(ModuleClass)) // quick check to avoid useless work var foundStatOrNonStaticVal: Boolean = false - for (tree <- transformedTree.body) { - tree match { + for (tree <- transformedTree.body) + tree match case vd: ValDef if vd.symbol.hasAnnotation(jsdefn.JSExportStaticAnnot) => - if (foundStatOrNonStaticVal) { + if (foundStatOrNonStaticVal) report.error( "@JSExportStatic vals and vars must be defined before any other val/var, and before any constructor statement.", vd) - } case vd: ValDef if !vd.symbol.is(Lazy) => foundStatOrNonStaticVal = true case _: MemberDef => case _ => foundStatOrNonStaticVal = true - } - } - } // Add exports to the template, if there are any exporters.get(clsSym).fold { @@ -257,34 +240,30 @@ class PrepJSInterop extends MacroTransform with IdentityDenotTransformer { thisP transformedTree.body ::: 
exports.toList ) } - } - private def transformStatOrExpr(tree: Tree)(using Context): Tree = { - tree match { + private def transformStatOrExpr(tree: Tree)(using Context): Tree = + tree match case Closure(env, call, functionalInterface) => val tpeSym = functionalInterface.tpe.typeSymbol - if (tpeSym.isJSType) { - def reportError(reasonAndExplanation: String): Unit = { + if (tpeSym.isJSType) + def reportError(reasonAndExplanation: String): Unit = report.error( em"Using an anonymous function as a SAM for the JavaScript type ${ tpeSym.fullName } is not allowed because $reasonAndExplanation", tree) - } - if (!tpeSym.is(Trait) || tpeSym.asClass.superClass != jsdefn.JSFunctionClass) { + if (!tpeSym.is(Trait) || tpeSym.asClass.superClass != jsdefn.JSFunctionClass) reportError( "it is not a trait extending js.Function. " + "Use an anonymous class instead.") - } else if (tpeSym.hasAnnotation(jsdefn.JSNativeAnnot)) { + else if (tpeSym.hasAnnotation(jsdefn.JSNativeAnnot)) reportError( "it is a native JS type. " + "It is not possible to directly implement it.") - } else if (!tree.tpe.possibleSamMethods.exists(_.symbol.hasJSCallCallingConvention)) { + else if (!tree.tpe.possibleSamMethods.exists(_.symbol.hasJSCallCallingConvention)) reportError( "its single abstract method is not named `apply`. 
" + "Use an anonymous class instead.") - } - } super.transform(tree) // Validate js.constructorOf[T] @@ -320,9 +299,8 @@ class PrepJSInterop extends MacroTransform with IdentityDenotTransformer { thisP // new DynamicImportThunk { def apply(): Any = body } val dynamicImportThunkAnonClass = AnonClass(currentOwner, List(jsdefn.DynamicImportThunkType), span) { cls => val applySym = newSymbol(cls, nme.apply, Method, MethodType(Nil, Nil, defn.AnyType), coord = span).entered - val transformedBody = enterDynamicImportEnclosingClass(enclosingClass) { + val transformedBody = enterDynamicImportEnclosingClass(enclosingClass): transform(body) - } val newBody = transformedBody.changeOwnerAfter(currentOwner, applySym, thisPhase) val applyDefDef = DefDef(applySym, newBody) List(applyDefDef) @@ -346,13 +324,12 @@ class PrepJSInterop extends MacroTransform with IdentityDenotTransformer { thisP if fun.symbol == jsdefn.JSDynamicLiteral_applyDynamic || fun.symbol == jsdefn.JSDynamicLiteral_applyDynamicNamed => // Check that the first argument list is a constant string "apply" - nameArgs match { + nameArgs match case List(Literal(Constant(s: String))) => if (s != "apply") report.error(em"js.Dynamic.literal does not have a method named $s", tree) case _ => report.error(em"js.Dynamic.literal.${tree.symbol.name} may not be called directly", tree) - } // TODO Warn for known duplicate property names @@ -400,26 +377,22 @@ class PrepJSInterop extends MacroTransform with IdentityDenotTransformer { thisP case _ => super.transform(tree) - } - } private def isNullLiteral(tree: Tree): Boolean = tree match case Literal(Constant(null)) => true case _ => false - private def validateJSConstructorOf(tree: Tree, tpeArg: Tree)(using Context): Unit = { + private def validateJSConstructorOf(tree: Tree, tpeArg: Tree)(using Context): Unit = val tpe = checkClassType(tpeArg.tpe, tpeArg.srcPos, traitReq = false, stablePrefixReq = false) - tpe.underlyingClassRef(refinementOK = false) match { + 
tpe.underlyingClassRef(refinementOK = false) match case typeRef: TypeRef if typeRef.symbol.isOneOf(Trait | ModuleClass) => report.error(em"non-trait class type required but $tpe found", tpeArg) case _ => // an error was already reported above - } - } /** Performs checks and rewrites specific to classes / objects extending `js.Any`. */ - private def transformJSClassDef(classDef: TypeDef)(using Context): Tree = { + private def transformJSClassDef(classDef: TypeDef)(using Context): Tree = val sym = classDef.symbol.asClass val isJSNative = sym.hasAnnotation(jsdefn.JSNativeAnnot) @@ -437,30 +410,27 @@ class PrepJSInterop extends MacroTransform with IdentityDenotTransformer { thisP report.error("Package objects may not extend js.Any.", classDef) // Check that we do not have a case modifier - if (sym.is(Case)) { + if (sym.is(Case)) report.error( "Classes and objects extending js.Any may not have a case modifier", classDef) - } // Check the parents - for (parentSym <- sym.parentSyms) { - parentSym match { + for (parentSym <- sym.parentSyms) + parentSym match case parentSym if parentSym == defn.ObjectClass => // AnyRef is valid, except for non-native JS classes and objects - if (!isJSNative && !sym.is(Trait)) { + if (!isJSNative && !sym.is(Trait)) report.error( "Non-native JS classes and objects cannot directly extend AnyRef. They must extend a JS class (native or not).", classDef) - } case parentSym if isJSAny(parentSym) => // A non-native JS type cannot extend a native JS trait // Otherwise, extending a JS type is valid - if (!isJSNative && parentSym.is(Trait) && parentSym.hasAnnotation(jsdefn.JSNativeAnnot)) { + if (!isJSNative && parentSym.is(Trait) && parentSym.hasAnnotation(jsdefn.JSNativeAnnot)) report.error( "Non-native JS types cannot directly extend native JS traits.", classDef) - } case parentSym if parentSym == defn.DynamicClass => /* We have to allow scala.Dynamic to be able to define js.Dynamic * and similar constructs. 
@@ -473,52 +443,45 @@ class PrepJSInterop extends MacroTransform with IdentityDenotTransformer { thisP report.error( em"${sym.name} extends ${parentSym.fullName} which does not extend js.Any.", classDef) - } - } // Checks for non-native JS stuff - if (!isJSNative) { + if (!isJSNative) // It cannot be in a native JS class or trait - if (enclosingOwner is OwnerKind.JSNativeClass) { + if (enclosingOwner is OwnerKind.JSNativeClass) report.error( "Native JS classes and traits cannot contain non-native JS classes, traits or objects", classDef) - } // Unless it is a trait, it cannot be in a native JS object - if (!sym.is(Trait) && (enclosingOwner is OwnerKind.JSNativeMod)) { + if (!sym.is(Trait) && (enclosingOwner is OwnerKind.JSNativeMod)) report.error( "Native JS objects cannot contain inner non-native JS classes or objects", classDef) - } // Local JS classes cannot be abstract (implementation restriction) - if (sym.is(Abstract, butNot = Trait) && sym.isLocalToBlock) { + if (sym.is(Abstract, butNot = Trait) && sym.isLocalToBlock) report.error( "Implementation restriction: local JS classes cannot be abstract", classDef) - } - } // Check for consistency of JS semantics across overriding val overridingPairsCursor = new OverridingPairs.Cursor(sym) - while (overridingPairsCursor.hasNext) { + while (overridingPairsCursor.hasNext) val overriding = overridingPairsCursor.overriding val overridden = overridingPairsCursor.overridden overridingPairsCursor.next() // prepare for next turn val clsSym = sym - if (overriding.isTerm) { - def errorPos = { + if (overriding.isTerm) + def errorPos = if (clsSym == overriding.owner) overriding.srcPos else if (clsSym == overridden.owner) overridden.srcPos else clsSym.srcPos - } // Some utils inspired by RefChecks - def infoString0(sym: Symbol, showLocation: Boolean): String = { + def infoString0(sym: Symbol, showLocation: Boolean): String = val sym1 = sym.underlyingSymbol def info = clsSym.thisType.memberInfo(sym1) val infoStr = @@ -526,17 
+489,15 @@ class PrepJSInterop extends MacroTransform with IdentityDenotTransformer { thisP else i" of type $info" val ccStr = s" called from JS as '${sym.jsCallingConvention.displayName}'" i"${if (showLocation) sym1.showLocated else sym1}$infoStr$ccStr" - } def infoString(sym: Symbol): String = infoString0(sym, sym.owner != clsSym) def infoStringWithLocation(sym: Symbol): String = infoString0(sym, true) - def emitOverrideError(msg: String): Unit = { + def emitOverrideError(msg: String): Unit = report.error( em"""error overriding ${infoStringWithLocation(overridden)}; | ${infoString(overriding)} $msg""", errorPos) - } // Check for overrides with different JS names - issue scala-js/scala-js#1983 if (overriding.jsCallingConvention != overridden.jsCallingConvention) @@ -546,66 +507,53 @@ class PrepJSInterop extends MacroTransform with IdentityDenotTransformer { thisP * at this point the symbols do not have @JSOptional yet, so we need * to detect whether it would be applied. */ - if (!isJSNative) { - def isJSOptional(sym: Symbol): Boolean = { + if (!isJSNative) + def isJSOptional(sym: Symbol): Boolean = sym.owner.is(Trait) && !sym.is(Deferred) && !sym.isConstructor && !sym.owner.hasAnnotation(jsdefn.JSNativeAnnot) - } if (isJSOptional(overriding) && !(overridden.is(Deferred) || isJSOptional(overridden))) emitOverrideError("cannot override a concrete member in a non-native JS trait") - } - } - } - val kind = { - if (!isJSNative) { + val kind = + if (!isJSNative) if (sym.is(ModuleClass)) OwnerKind.JSMod else if (sym.is(Trait)) OwnerKind.JSTrait else OwnerKind.JSNonTraitClass - } else { + else if (sym.is(ModuleClass)) OwnerKind.JSNativeMod else OwnerKind.JSNativeClass - } - } - enterOwner(kind) { + enterOwner(kind): super.transform(classDef) - } - } - private def checkJSNativeDefinition(treePos: SrcPos, annotPos: SrcPos, sym: Symbol)(using Context): Unit = { + private def checkJSNativeDefinition(treePos: SrcPos, annotPos: SrcPos, sym: Symbol)(using Context): Unit = // 
Check if we may have a JS native here - if (sym.isLocalToBlock) { + if (sym.isLocalToBlock) report.error("@js.native is not allowed on local definitions", annotPos) - } else if (!sym.isClass && (anyEnclosingOwner is (OwnerKind.ScalaClass | OwnerKind.JSType))) { + else if (!sym.isClass && (anyEnclosingOwner is (OwnerKind.ScalaClass | OwnerKind.JSType))) report.error("@js.native vals and defs can only appear in static Scala objects", annotPos) - } else if (sym.isClass && !isJSAny(sym)) { + else if (sym.isClass && !isJSAny(sym)) report.error("Classes, traits and objects not extending js.Any may not have an @js.native annotation", annotPos) - } else if (anyEnclosingOwner is OwnerKind.ScalaClass) { + else if (anyEnclosingOwner is OwnerKind.ScalaClass) report.error("Scala traits and classes may not have native JS members", annotPos) - } else if (enclosingOwner is OwnerKind.JSNonNative) { + else if (enclosingOwner is OwnerKind.JSNonNative) report.error("non-native JS classes, traits and objects may not have native JS members", annotPos) - } else { + else // The symbol can be annotated with @js.native. Now check its JS native loading spec. 
- if (sym.is(Trait)) { - for (annot <- sym.annotations) { + if (sym.is(Trait)) + for (annot <- sym.annotations) val annotSym = annot.symbol if (isJSNativeLoadingSpecAnnot(annotSym)) report.error(em"Traits may not have an @${annotSym.name} annotation.", annot.tree) - } - } else { + else checkJSNativeLoadSpecOf(treePos, sym) - } - } - } - private def checkJSNativeLoadSpecOf(pos: SrcPos, sym: Symbol)(using Context): Unit = { + private def checkJSNativeLoadSpecOf(pos: SrcPos, sym: Symbol)(using Context): Unit = - def checkGlobalRefName(globalRef: String): Unit = { + def checkGlobalRefName(globalRef: String): Unit = if (!JSGlobalRef.isValidJSGlobalRefName(globalRef)) report.error(em"The name of a JS global variable must be a valid JS identifier (got '$globalRef')", pos) - } - if (enclosingOwner is OwnerKind.JSNative) { + if (enclosingOwner is OwnerKind.JSNative) /* We cannot get here for @js.native vals and defs. That would mean we * have an @js.native val/def inside a JavaScript type, which is not * allowed and already caught in checkJSNativeDefinition(). 
@@ -613,79 +561,67 @@ class PrepJSInterop extends MacroTransform with IdentityDenotTransformer { thisP assert(sym.isClass, s"undetected @js.native val or def ${sym.fullName} inside JS type at $pos") - for (annot <- sym.annotations) { + for (annot <- sym.annotations) val annotSym = annot.symbol if (isJSNativeLoadingSpecAnnot(annotSym)) report.error(em"Nested JS classes and objects cannot have an @${annotSym.name} annotation.", annot.tree) - } - if (sym.owner.isStaticOwner) { - for (annot <- sym.annotations) { - if (annot.symbol == jsdefn.JSNameAnnot && !(annot.arguments.head.tpe.derivesFrom(defn.StringClass))) { + if (sym.owner.isStaticOwner) + for (annot <- sym.annotations) + if (annot.symbol == jsdefn.JSNameAnnot && !(annot.arguments.head.tpe.derivesFrom(defn.StringClass))) report.error( "Implementation restriction: " + "@JSName with a js.Symbol is not supported on nested native classes and objects", annot.tree) - } - } - if (sym.owner.hasAnnotation(jsdefn.JSGlobalScopeAnnot)) { - val jsName = sym.jsName match { + if (sym.owner.hasAnnotation(jsdefn.JSGlobalScopeAnnot)) + val jsName = sym.jsName match case JSName.Literal(jsName) => checkGlobalRefName(jsName) case JSName.Computed(_) => () // compile error above or in `checkJSNameArgument` - } - } - } - } else { - def checkGlobalRefPath(pathName: String): Unit = { + else + def checkGlobalRefPath(pathName: String): Unit = val dotIndex = pathName.indexOf('.') val globalRef = if (dotIndex < 0) pathName else pathName.substring(0, dotIndex).nn checkGlobalRefName(globalRef) - } - checkAndGetJSNativeLoadingSpecAnnotOf(pos, sym) match { + checkAndGetJSNativeLoadingSpecAnnotOf(pos, sym) match case Some(annot) if annot.symbol == jsdefn.JSGlobalScopeAnnot => - if (!sym.is(Module)) { + if (!sym.is(Module)) report.error( "@JSGlobalScope can only be used on native JS objects (with @js.native).", annot.tree) - } case Some(annot) if annot.symbol == jsdefn.JSGlobalAnnot => checkJSGlobalLiteral(annot) - val pathName = 
annot.argumentConstantString(0).getOrElse { + val pathName = annot.argumentConstantString(0).getOrElse: val symTermName = sym.name.exclude(NameKinds.ModuleClassName).toTermName - if (symTermName == nme.apply) { + if (symTermName == nme.apply) report.error( "Native JS definitions named 'apply' must have an explicit name in @JSGlobal", annot.tree) - } else if (symTermName.isSetterName) { + else if (symTermName.isSetterName) report.error( "Native JS definitions with a name ending in '_=' must have an explicit name in @JSGlobal", annot.tree) - } sym.defaultJSName - } checkGlobalRefPath(pathName) case Some(annot) if annot.symbol == jsdefn.JSImportAnnot => checkJSImportLiteral(annot) - if (annot.arguments.sizeIs < 2) { + if (annot.arguments.sizeIs < 2) val symTermName = sym.name.exclude(NameKinds.ModuleClassName).toTermName - if (symTermName == nme.apply) { + if (symTermName == nme.apply) report.error( "Native JS definitions named 'apply' must have an explicit name in @JSImport", annot.tree) - } else if (symTermName.isSetterName) { + else if (symTermName.isSetterName) report.error( "Native JS definitions with a name ending in '_=' must have an explicit name in @JSImport", annot.tree) - } - } annot.argumentConstantString(2).foreach { globalPathName => checkGlobalRefPath(globalPathName) } @@ -693,23 +629,19 @@ class PrepJSInterop extends MacroTransform with IdentityDenotTransformer { thisP case _ => // We already emitted an error in checkAndGetJSNativeLoadingSpecAnnotOf () - } - } - } /** Transforms a non-`@js.native` ValDef or DefDef in a Scala class. 
*/ - private def transformScalaValOrDefDef(tree: ValOrDefDef)(using Context): Tree = { - tree match { + private def transformScalaValOrDefDef(tree: ValOrDefDef)(using Context): Tree = + tree match // Catch ValDefs in enumerations with simple calls to Value case vd: ValDef if (enclosingOwner is OwnerKind.Enum) && jsdefn.scalaEnumeration.isValueMethodNoName(vd.rhs.symbol) => val enumDefn = jsdefn.scalaEnumeration // Extract the Int argument if it is present - val optIntArg = vd.rhs match { + val optIntArg = vd.rhs match case _:Select | _:Ident => None case Apply(_, intArg :: Nil) => Some(intArg) - } val defaultName = vd.name.getterName.encode.toString @@ -727,22 +659,19 @@ class PrepJSInterop extends MacroTransform with IdentityDenotTransformer { thisP val condTree = nullCompTree.select(nme.ZAND).appliedTo(hasNextTree) val nameTree = If(condTree, nextNameTree.select(enumDefn.next).appliedToNone, Literal(Constant(defaultName))) - val newRhs = optIntArg match { + val newRhs = optIntArg match case None => This(thisClass).select(enumDefn.Enumeration_Value_StringArg).appliedTo(nameTree) case Some(intArg) => This(thisClass).select(enumDefn.Enumeration_Value_IntStringArg).appliedTo(intArg, nameTree) - } cpy.ValDef(vd)(rhs = newRhs) case _ => super.transform(tree) - } - } /** Verify a ValOrDefDef that is annotated with `@js.native`. 
*/ - private def transformJSNativeValOrDefDef(tree: ValOrDefDef)(using Context): ValOrDefDef = { + private def transformJSNativeValOrDefDef(tree: ValOrDefDef)(using Context): ValOrDefDef = val sym = tree.symbol def annotPos(annotSym: Symbol): SrcPos = @@ -759,22 +688,20 @@ class PrepJSInterop extends MacroTransform with IdentityDenotTransformer { thisP // Check that we do not override or implement anything from a superclass val overriddenSymbols = sym.allOverriddenSymbols - if (overriddenSymbols.hasNext) { + if (overriddenSymbols.hasNext) val overridden = overriddenSymbols.next() val verb = if (overridden.is(Deferred)) "implement" else "override" report.error(em"An @js.native member cannot $verb the inherited member ${overridden.fullName}", tree) - } tree - } /** Verify a ValOrDefDef inside a js.Any */ - private def transformValOrDefDefInJSType(tree: ValOrDefDef)(using Context): Tree = { + private def transformValOrDefDefInJSType(tree: ValOrDefDef)(using Context): Tree = val sym = tree.symbol assert(!sym.isLocalToBlock, i"$tree at ${tree.span}") - sym.name match { + sym.name match case nme.apply if !sym.hasAnnotation(jsdefn.JSNameAnnot) && (!sym.is(Method) || sym.isJSGetter) => report.error( "A member named apply represents function application in JavaScript. " + @@ -797,13 +724,12 @@ class PrepJSInterop extends MacroTransform with IdentityDenotTransformer { thisP sym) case _ => - } if (sym.isJSSetter) checkSetterSignature(sym, tree, exported = false) - if (enclosingOwner is OwnerKind.JSNonNative) { - JSCallingConvention.of(sym) match { + if (enclosingOwner is OwnerKind.JSNonNative) + JSCallingConvention.of(sym) match case JSCallingConvention.Property(_) => // checked above case JSCallingConvention.Method(_) => // no checks needed @@ -815,18 +741,16 @@ class PrepJSInterop extends MacroTransform with IdentityDenotTransformer { thisP * JS function SAM trait. 
*/ val owner = sym.owner - val isPlausibleJSFunctionType = { + val isPlausibleJSFunctionType = owner.is(Trait) && owner.asClass.superClass == jsdefn.JSFunctionClass && owner.typeRef.possibleSamMethods.map(_.symbol) == Seq(sym) && !sym.info.isInstanceOf[PolyType] - } - if (!isPlausibleJSFunctionType) { + if (!isPlausibleJSFunctionType) report.error( "A non-native JS type can only declare an abstract method named `apply` without `@JSName` " + "if it is the SAM of a trait that extends js.Function", tree) - } case JSCallingConvention.BracketAccess => report.error("@JSBracketAccess is not allowed in non-native JS classes", tree) @@ -836,16 +760,14 @@ class PrepJSInterop extends MacroTransform with IdentityDenotTransformer { thisP report.error("A non-native JS class cannot declare a method named like a unary operation without `@JSName`", tree) case JSCallingConvention.BinaryOp(_) => report.error("A non-native JS class cannot declare a method named like a binary operation without `@JSName`", tree) - } - } else { - def checkNoDefaultOrRepeated(subject: String) = { + else + def checkNoDefaultOrRepeated(subject: String) = if (sym.info.paramInfoss.flatten.exists(_.isRepeatedParam)) report.error(s"$subject may not have repeated parameters", tree) if (sym.hasDefaultParams) report.error(s"$subject may not have default parameters", tree) - } - JSCallingConvention.of(sym) match { + JSCallingConvention.of(sym) match case JSCallingConvention.Property(_) => // checked above case JSCallingConvention.Method(_) => // no checks needed case JSCallingConvention.Call => // no checks needed @@ -865,76 +787,67 @@ class PrepJSInterop extends MacroTransform with IdentityDenotTransformer { thisP case JSCallingConvention.BracketCall => // JS bracket calls must have at least one non-repeated parameter - sym.info.stripPoly match { + sym.info.stripPoly match case mt: MethodType if mt.paramInfos.nonEmpty && !mt.paramInfos.head.isRepeatedParam => // ok case _ => report.error("@JSBracketCall methods 
must have at least one non-repeated parameter", tree) - } - } - } - if (sym.hasAnnotation(defn.NativeAnnot)) { + if (sym.hasAnnotation(defn.NativeAnnot)) // Native methods are not allowed report.error("Methods in a js.Any may not be @native", tree) - } /* In native JS types, there should not be any private member, except * private[this] constructors. */ - if ((enclosingOwner is OwnerKind.JSNative) && isPrivateMaybeWithin(sym)) { - if (sym.isClassConstructor) { - if (!sym.isAllOf(PrivateLocal)) { + if ((enclosingOwner is OwnerKind.JSNative) && isPrivateMaybeWithin(sym)) + if (sym.isClassConstructor) + if (!sym.isAllOf(PrivateLocal)) report.error( "Native JS classes may not have private constructors. " + "Use `private[this]` to declare an internal constructor.", sym) - } - } else if (!sym.is(ParamAccessor)) { + else if (!sym.is(ParamAccessor)) report.error( "Native JS classes may not have private members. " + "Use a public member in a private facade instead.", tree) - } - } - if (enclosingOwner is OwnerKind.JSNonNative) { + if (enclosingOwner is OwnerKind.JSNonNative) // Private methods cannot be overloaded - if (sym.is(Method) && isPrivateMaybeWithin(sym)) { + if (sym.is(Method) && isPrivateMaybeWithin(sym)) val alts = sym.owner.info.memberBasedOnFlags(sym.name, required = Method) - if (alts.isOverloaded) { + if (alts.isOverloaded) report.error( "Private methods in non-native JS classes cannot be overloaded. 
Use different names instead.", tree) - } - } // private[Scope] methods must be final if (!sym.isOneOf(Final | Protected) && sym.privateWithin.exists && !sym.isClassConstructor) report.error("Qualified private members in non-native JS classes must be final", tree) // Traits must be pure interfaces, except for js.undefined members - if (sym.owner.is(Trait) && sym.isTerm && !sym.isConstructor) { - if (sym.is(Method) && isPrivateMaybeWithin(sym)) { + if (sym.owner.is(Trait) && sym.isTerm && !sym.isConstructor) + if (sym.is(Method) && isPrivateMaybeWithin(sym)) report.error("A non-native JS trait cannot contain private members", tree) - } else if (sym.is(Lazy)) { + else if (sym.is(Lazy)) report.error("A non-native JS trait cannot contain lazy vals", tree) - } else if (sym.is(ParamAccessor)) { + else if (sym.is(ParamAccessor)) // #12621 report.error("A non-native JS trait cannot have constructor parameters", tree) - } else if (!sym.is(Deferred)) { + else if (!sym.is(Deferred)) /* Tell the back-end not to emit this thing. In fact, this only * matters for mixed-in members created from this member. 
*/ sym.addAnnotation(jsdefn.JSOptionalAnnot) - if (!sym.isSetter) { + if (!sym.isSetter) // Check that methods do not have parens if (sym.is(Method, butNot = Accessor) && sym.info.stripPoly.isInstanceOf[MethodType]) report.error("In non-native JS traits, defs with parentheses must be abstract.", tree.rhs) // Check that the rhs is `js.undefined` - tree.rhs match { + tree.rhs match case sel: Select if sel.symbol == jsdefn.JSPackage_undefined => // ok case Apply(Apply(TypeApply(fromTypeConstructorFun, _), (sel: Select) :: Nil), _) @@ -945,11 +858,7 @@ class PrepJSInterop extends MacroTransform with IdentityDenotTransformer { thisP report.error( "Members of non-native JS traits must either be abstract, or their right-hand-side must be `js.undefined`.", tree) - } - } - } - } - } else { // enclosingOwner isnt OwnerKind.JSNonNative + else // enclosingOwner isnt OwnerKind.JSNonNative // Check that the rhs is valid if (sym.isPrimaryConstructor || sym.isOneOf(Param | ParamAccessor | Deferred | Synthetic) || sym.name.is(DefaultGetterName) || sym.isSetter) { @@ -961,9 +870,9 @@ class PrepJSInterop extends MacroTransform with IdentityDenotTransformer { thisP * - abstract members * - synthetic members (to avoid double errors with case classes, e.g. 
generated copy method) */ - } else if (sym.isConstructor) { + } else if (sym.isConstructor) // Force secondary ctor to have only a call to the primary ctor inside - tree.rhs match { + tree.rhs match case Block(List(Apply(trg, _)), Literal(Constant(()))) if trg.symbol.isPrimaryConstructor && trg.symbol.owner == sym.owner => // everything is fine here @@ -971,15 +880,11 @@ class PrepJSInterop extends MacroTransform with IdentityDenotTransformer { thisP report.error( "A secondary constructor of a native JS class may only call the primary constructor", tree.rhs) - } - } else { + else // Check that the tree's rhs is exactly `= js.native` checkRHSCallsJSNative(tree, "Concrete members of JS native types") - } - } super.transform(tree) - } /** Removes annotations from exported definitions (e.g. `export foo.bar`): * - `js.native` @@ -1000,12 +905,12 @@ class PrepJSInterop extends MacroTransform with IdentityDenotTransformer { thisP sym.annotations = newAnnots end stripJSAnnotsOnExported - private def checkRHSCallsJSNative(tree: ValOrDefDef, longKindStr: String)(using Context): Unit = { + private def checkRHSCallsJSNative(tree: ValOrDefDef, longKindStr: String)(using Context): Unit = if tree.symbol.is(Exported) then return // we already report an error that exports are not allowed here, this prevents extra errors. // Check that the rhs is exactly `= js.native` - tree.rhs match { + tree.rhs match case sel: Select if sel.symbol == jsdefn.JSPackage_native => // ok case rhs: Ident if rhs.symbol == jsdefn.JSPackage_native => @@ -1013,17 +918,15 @@ class PrepJSInterop extends MacroTransform with IdentityDenotTransformer { thisP case _ => val pos = if (tree.rhs != EmptyTree) tree.rhs.srcPos else tree.srcPos report.error(s"$longKindStr may only call js.native.", pos) - } // Check that the result type was explicitly specified // (This is stronger than Scala 2, which only warns, and only if it was inferred as Nothing.) 
if (tree.tpt.isInstanceOf[InferredTypeTree]) report.error(em"The type of ${tree.name} must be explicitly specified because it is JS native.", tree) - } - private def checkJSNativeSpecificAnnotsOnNonJSNative(memberDef: MemberDef)(using Context): Unit = { - for (annot <- memberDef.symbol.annotations) { - annot.symbol match { + private def checkJSNativeSpecificAnnotsOnNonJSNative(memberDef: MemberDef)(using Context): Unit = + for (annot <- memberDef.symbol.annotations) + annot.symbol match case annotSym if annotSym == jsdefn.JSGlobalAnnot => report.error("@JSGlobal can only be used on native JS definitions (with @js.native).", annot.tree) case annotSym if annotSym == jsdefn.JSImportAnnot => @@ -1032,14 +935,11 @@ class PrepJSInterop extends MacroTransform with IdentityDenotTransformer { thisP report.error("@JSGlobalScope can only be used on native JS objects (with @js.native).", annot.tree) case _ => // ok - } - } - } - private def checkJSNameAnnots(sym: Symbol)(using Context): Unit = { + private def checkJSNameAnnots(sym: Symbol)(using Context): Unit = val allJSNameAnnots = sym.annotations.filter(_.symbol == jsdefn.JSNameAnnot).reverse - for (annot <- allJSNameAnnots.headOption) { + for (annot <- allJSNameAnnots.headOption) // Check everything about the first @JSName annotation if (sym.isLocalToBlock || (enclosingOwner isnt OwnerKind.JSType)) report.error("@JSName can only be used on members of JS types.", annot.tree) @@ -1053,33 +953,28 @@ class PrepJSInterop extends MacroTransform with IdentityDenotTransformer { thisP // Check that there is at most one @JSName annotation. for (duplicate <- allJSNameAnnots.tail) report.error("Duplicate @JSName annotation.", duplicate.tree) - } - } /** Checks that the argument to `@JSName` annotations on `memberSym` is legal. * * Reports an error on each annotation where this is not the case. * One one `@JSName` annotation is allowed, but that is handled somewhere else. 
*/ - private def checkJSNameArgument(memberSym: Symbol, annot: Annotation)(using Context): Unit = { + private def checkJSNameArgument(memberSym: Symbol, annot: Annotation)(using Context): Unit = val argTree = annot.arguments.head - if (argTree.tpe.derivesFrom(defn.StringClass)) { + if (argTree.tpe.derivesFrom(defn.StringClass)) // We have a String. It must be a literal. if (!annot.argumentConstantString(0).isDefined) report.error("A String argument to JSName must be a literal string", argTree) - } else { + else // We have a js.Symbol. It must be a stable reference. val sym = argTree.symbol - if (!sym.isStatic || !sym.isStableMember) { + if (!sym.isStatic || !sym.isStableMember) report.error("A js.Symbol argument to JSName must be a static, stable identifier", argTree) - } else if ((enclosingOwner is OwnerKind.JSNonNative) && sym.owner == memberSym.owner) { + else if ((enclosingOwner is OwnerKind.JSNonNative) && sym.owner == memberSym.owner) report.warning( "This symbol is defined in the same object as the annotation's target. " + "This will cause a stackoverflow at runtime", argTree) - } - } - } /** Constant-folds arguments to `@JSExportTopLevel` and `@JSExportStatic`. * @@ -1088,22 +983,20 @@ class PrepJSInterop extends MacroTransform with IdentityDenotTransformer { thisP * annotations as literal strings, so we specifically constant-fold them * here. 
*/ - private def constFoldJSExportTopLevelAndStaticAnnotations(sym: Symbol)(using Context): Unit = { + private def constFoldJSExportTopLevelAndStaticAnnotations(sym: Symbol)(using Context): Unit = val annots = sym.annotations val newAnnots = annots.mapConserve { annot => - if (annot.symbol == jsdefn.JSExportTopLevelAnnot || annot.symbol == jsdefn.JSExportStaticAnnot) { - annot.tree match { + if (annot.symbol == jsdefn.JSExportTopLevelAnnot || annot.symbol == jsdefn.JSExportStaticAnnot) + annot.tree match case app @ Apply(fun, args) => val newArgs = args.mapConserve { arg => - arg match { + arg match case _: Literal => arg case _ => - arg.tpe.widenTermRefExpr.normalized match { + arg.tpe.widenTermRefExpr.normalized match case ConstantType(c) => Literal(c).withSpan(arg.span) case _ => arg // PrepJSExports will emit an error for those cases - } - } } if (newArgs eq args) annot @@ -1111,14 +1004,11 @@ class PrepJSInterop extends MacroTransform with IdentityDenotTransformer { thisP Annotation(cpy.Apply(app)(fun, newArgs)) case _ => annot - } - } else { + else annot - } } if (newAnnots ne annots) sym.annotations = newAnnots - } /** Mark the symbol as exposed if it is a non-private term member of a * non-native JS class. @@ -1127,8 +1017,8 @@ class PrepJSInterop extends MacroTransform with IdentityDenotTransformer { thisP * The symbol, which must be the module symbol for a module, not its * module class symbol. 
*/ - private def markExposedIfRequired(sym: Symbol)(using Context): Unit = { - val shouldBeExposed: Boolean = { + private def markExposedIfRequired(sym: Symbol)(using Context): Unit = + val shouldBeExposed: Boolean = // it is a term member sym.isTerm && // it is a member of a non-native JS class @@ -1141,19 +1031,16 @@ class PrepJSInterop extends MacroTransform with IdentityDenotTransformer { thisP !sym.isConstructor && // it is not a default getter !sym.name.is(DefaultGetterName) - } if (shouldBeExposed) sym.addAnnotation(jsdefn.ExposedJSMemberAnnot) - } - } } -object PrepJSInterop { +object PrepJSInterop: val name: String = "prepjsinterop" val description: String = "additional checks and transformations for Scala.js" - private final class OwnerKind private (private val baseKinds: Int) extends AnyVal { + private final class OwnerKind private (private val baseKinds: Int) extends AnyVal: inline def isBaseKind: Boolean = Integer.lowestOneBit(baseKinds) == baseKinds && baseKinds != 0 // exactly 1 bit on @@ -1167,9 +1054,8 @@ object PrepJSInterop { inline def isnt(that: OwnerKind): Boolean = !this.is(that) - } - private object OwnerKind { + private object OwnerKind: /** No owner, i.e., we are at the top-level. */ val None = new OwnerKind(0x00) @@ -1218,7 +1104,6 @@ object PrepJSInterop { /** Any kind of class/trait, i.e., a Scala or JS class/trait. */ val AnyClass = ScalaClass | JSNativeClass | JSNonTraitClass | JSTrait - } /** Tests if the symbol extend `js.Any`. * @@ -1232,7 +1117,7 @@ object PrepJSInterop { * * Reports error messages otherwise. 
*/ - def checkSetterSignature(sym: Symbol, pos: SrcPos, exported: Boolean)(using Context): Unit = { + def checkSetterSignature(sym: Symbol, pos: SrcPos, exported: Boolean)(using Context): Unit = val typeStr = if (exported) "Exported" else "JS" val tpe = sym.info @@ -1242,7 +1127,7 @@ object PrepJSInterop { report.error(s"$typeStr setters must return Unit", pos) // There must be exactly one non-varargs, non-default parameter - tpe.paramInfoss match { + tpe.paramInfoss match case List(List(argInfo)) => // Arg list is OK. Do additional checks. if (tpe.isVarArgsMethod) @@ -1252,8 +1137,6 @@ object PrepJSInterop { case _ => report.error(s"$typeStr setters must have exactly one argument", pos) - } - } /** Tests whether the symbol has `private` in any form, either `private`, * `private[this]` or `private[Enclosing]`. @@ -1266,16 +1149,14 @@ object PrepJSInterop { * * Reports an error on the annotation if it is not the case. */ - private def checkJSGlobalLiteral(annot: Annotation)(using Context): Unit = { - if (annot.arguments.nonEmpty) { + private def checkJSGlobalLiteral(annot: Annotation)(using Context): Unit = + if (annot.arguments.nonEmpty) assert(annot.arguments.size == 1, s"@JSGlobal annotation $annot has more than 1 argument") val argIsValid = annot.argumentConstantString(0).isDefined if (!argIsValid) report.error("The argument to @JSGlobal must be a literal string.", annot.arguments.head) - } - } /** Checks that arguments to an `@JSImport` annotation are literals. * @@ -1284,7 +1165,7 @@ object PrepJSInterop { * * Reports an error on the annotation if it is not the case. 
*/ - private def checkJSImportLiteral(annot: Annotation)(using Context): Unit = { + private def checkJSImportLiteral(annot: Annotation)(using Context): Unit = val args = annot.arguments val argCount = args.size assert(argCount >= 1 && argCount <= 3, @@ -1301,10 +1182,9 @@ object PrepJSInterop { val thirdArgIsValid = argCount < 3 || annot.argumentConstantString(2).isDefined if (!thirdArgIsValid) report.error("The third argument to @JSImport, when present, must be a literal string.", args(2)) - } private def checkAndGetJSNativeLoadingSpecAnnotOf(pos: SrcPos, sym: Symbol)( - using Context): Option[Annotation] = { + using Context): Option[Annotation] = // Must not have @JSName @@ -1319,7 +1199,7 @@ object PrepJSInterop { if (sym.is(Module)) "Native JS objects must have exactly one annotation among @JSGlobal, @JSImport and @JSGlobalScope." else "Native JS classes, vals and defs must have exactly one annotation among @JSGlobal and @JSImport." - annots match { + annots match case Nil => report.error(badAnnotCountMsg, pos) None @@ -1331,33 +1211,25 @@ object PrepJSInterop { for (annot <- duplicates) report.error(badAnnotCountMsg, annot.tree) Some(result) - } - } /* Note that we consider @JSGlobalScope as a JS native loading spec because * it's convenient for the purposes of PrepJSInterop. Actually @JSGlobalScope * objects do not receive a JS loading spec in their IR. */ - private def isJSNativeLoadingSpecAnnot(sym: Symbol)(using Context): Boolean = { + private def isJSNativeLoadingSpecAnnot(sym: Symbol)(using Context): Boolean = sym == jsdefn.JSGlobalAnnot || sym == jsdefn.JSImportAnnot || sym == jsdefn.JSGlobalScopeAnnot - } - private def checkInternalAnnotations(sym: Symbol)(using Context): Unit = { + private def checkInternalAnnotations(sym: Symbol)(using Context): Unit = /** Returns true iff it is a compiler annotations. 
*/ - def isCompilerAnnotation(annotation: Annotation): Boolean = { + def isCompilerAnnotation(annotation: Annotation): Boolean = annotation.symbol == jsdefn.ExposedJSMemberAnnot || annotation.symbol == jsdefn.JSTypeAnnot || annotation.symbol == jsdefn.JSOptionalAnnot - } - for (annotation <- sym.annotations) { - if (isCompilerAnnotation(annotation)) { + for (annotation <- sym.annotations) + if (isCompilerAnnotation(annotation)) report.error( em"@${annotation.symbol.fullName} is for compiler internal use only. Do not use it yourself.", annotation.tree) - } - } - } -} diff --git a/compiler/src/dotty/tools/dotc/typer/Applications.scala b/compiler/src/dotty/tools/dotc/typer/Applications.scala index fbed4b77d3fe..ebfe9c3c3c95 100644 --- a/compiler/src/dotty/tools/dotc/typer/Applications.scala +++ b/compiler/src/dotty/tools/dotc/typer/Applications.scala @@ -37,18 +37,17 @@ import annotation.threadUnsafe import scala.util.control.NonFatal -object Applications { +object Applications: import tpd._ def extractorMember(tp: Type, name: Name)(using Context): SingleDenotation = tp.member(name).suchThat(sym => sym.info.isParameterless && sym.info.widenExpr.isValueType) - def extractorMemberType(tp: Type, name: Name, errorPos: SrcPos)(using Context): Type = { + def extractorMemberType(tp: Type, name: Name, errorPos: SrcPos)(using Context): Type = val ref = extractorMember(tp, name) if (ref.isOverloaded) errorType(em"Overloaded reference to $ref is not allowed in extractor", errorPos) ref.info.widenExpr.annotatedToRepeated - } /** Does `tp` fit the "product match" conditions as an unapply result type * for a pattern with `numArgs` subpatterns? @@ -62,11 +61,10 @@ object Applications { * This is the case if (1) `tp` has members `_1` to `_N` where `N <= numArgs + 1`. 
* (2) `tp._N` conforms to Seq match */ - def isProductSeqMatch(tp: Type, numArgs: Int, errorPos: SrcPos = NoSourcePosition)(using Context): Boolean = { + def isProductSeqMatch(tp: Type, numArgs: Int, errorPos: SrcPos = NoSourcePosition)(using Context): Boolean = val arity = productArity(tp, errorPos) arity > 0 && arity <= numArgs + 1 && unapplySeqTypeElemTp(productSelectorTypes(tp, errorPos).last).exists - } /** Does `tp` fit the "get match" conditions as an unapply result type? * This is the case of `tp` has a `get` member as well as a @@ -87,7 +85,7 @@ object Applications { * ``` * returns `T`, otherwise NoType. */ - def unapplySeqTypeElemTp(getTp: Type)(using Context): Type = { + def unapplySeqTypeElemTp(getTp: Type)(using Context): Type = def lengthTp = ExprType(defn.IntType) def lengthCompareTp = MethodType(List(defn.IntType), defn.IntType) def applyTp(elemTp: Type) = MethodType(List(defn.IntType), elemTp) @@ -107,12 +105,10 @@ object Applications { hasMethod(nme.toSeq, toSeqTp(elemTp)) if (isValid) elemTp else NoType - } - def productSelectorTypes(tp: Type, errorPos: SrcPos)(using Context): List[Type] = { + def productSelectorTypes(tp: Type, errorPos: SrcPos)(using Context): List[Type] = val sels = for (n <- Iterator.from(0)) yield extractorMemberType(tp, nme.selectorName(n), errorPos) sels.takeWhile(_.exists).toList - } def tupleComponentTypes(tp: Type)(using Context): List[Type] = tp.widenExpr.dealias.normalized match @@ -130,28 +126,25 @@ object Applications { def productArity(tp: Type, errorPos: SrcPos = NoSourcePosition)(using Context): Int = if (defn.isProductSubType(tp)) productSelectorTypes(tp, errorPos).size else -1 - def productSelectors(tp: Type)(using Context): List[Symbol] = { + def productSelectors(tp: Type)(using Context): List[Symbol] = val sels = for (n <- Iterator.from(0)) yield tp.member(nme.selectorName(n)).suchThat(_.info.isParameterless).symbol sels.takeWhile(_.exists).toList - } def getUnapplySelectors(tp: Type, args: List[untpd.Tree], 
pos: SrcPos)(using Context): List[Type] = - if (args.length > 1 && !(tp.derivesFrom(defn.SeqClass))) { + if (args.length > 1 && !(tp.derivesFrom(defn.SeqClass))) val sels = productSelectorTypes(tp, pos) if (sels.length == args.length) sels else tp :: Nil - } else tp :: Nil - def productSeqSelectors(tp: Type, argsNum: Int, pos: SrcPos)(using Context): List[Type] = { + def productSeqSelectors(tp: Type, argsNum: Int, pos: SrcPos)(using Context): List[Type] = val selTps = productSelectorTypes(tp, pos) val arity = selTps.length val elemTp = unapplySeqTypeElemTp(selTps.last) (0 until argsNum).map(i => if (i < arity - 1) selTps(i) else elemTp).toList - } - def unapplyArgs(unapplyResult: Type, unapplyFn: Tree, args: List[untpd.Tree], pos: SrcPos)(using Context): List[Type] = { + def unapplyArgs(unapplyResult: Type, unapplyFn: Tree, args: List[untpd.Tree], pos: SrcPos)(using Context): List[Type] = def getName(fn: Tree): Name = fn match case TypeApply(fn, _) => getName(fn) @@ -161,25 +154,22 @@ object Applications { def getTp = extractorMemberType(unapplyResult, nme.get, pos) - def fail = { + def fail = report.error(UnapplyInvalidReturnType(unapplyResult, unapplyName), pos) Nil - } - def unapplySeq(tp: Type)(fallback: => List[Type]): List[Type] = { + def unapplySeq(tp: Type)(fallback: => List[Type]): List[Type] = val elemTp = unapplySeqTypeElemTp(tp) if (elemTp.exists) args.map(Function.const(elemTp)) else if (isProductSeqMatch(tp, args.length, pos)) productSeqSelectors(tp, args.length, pos) else if tp.derivesFrom(defn.NonEmptyTupleClass) then foldApplyTupleType(tp) else fallback - } if (unapplyName == nme.unapplySeq) - unapplySeq(unapplyResult) { + unapplySeq(unapplyResult): if (isGetMatch(unapplyResult, pos)) unapplySeq(getTp)(fail) else fail - } - else { + else assert(unapplyName == nme.unapply) if (isProductMatch(unapplyResult, args.length, pos)) productSelectorTypes(unapplyResult, pos) @@ -194,8 +184,6 @@ object Applications { else if 
unapplyResult.derivesFrom(defn.NonEmptyTupleClass) then foldApplyTupleType(unapplyResult) else fail - } - } def foldApplyTupleType(tp: Type)(using Context): List[Type] = object tupleFold extends TypeAccumulator[List[Type]]: @@ -240,10 +228,10 @@ object Applications { case Select(receiver, _) => findDefaultGetter(meth, receiver, idx) case mr => mappedAltInfo(meth) match - case Some((pre, skipped)) => - findDefaultGetter(meth, reifyPrefix(pre), idx + skipped) - case None => - findDefaultGetter(meth, reifyPrefix(mr.tpe.normalizedPrefix), idx) + case Some((pre, skipped)) => + findDefaultGetter(meth, reifyPrefix(pre), idx + skipped) + case None => + findDefaultGetter(meth, reifyPrefix(mr.tpe.normalizedPrefix), idx) else EmptyTree // structural applies don't have symbols or defaults end findDefaultGetter @@ -289,7 +277,7 @@ object Applications { end findDefaultGetter /** Splice new method reference `meth` into existing application `app` */ - private def spliceMeth(meth: Tree, app: Tree)(using Context): Tree = app match { + private def spliceMeth(meth: Tree, app: Tree)(using Context): Tree = app match case Apply(fn, args) => // Constructors always have one leading non-implicit parameter list. // Empty list is inserted for constructors where the first parameter list is implicit. @@ -335,7 +323,6 @@ object Applications { // when typing the default argument, which is too early. 
spliceMeth(meth, fn).appliedToTypes(targs.tpes) case _ => meth - } def defaultArgument(fn: Tree, n: Int, testOnly: Boolean)(using Context): Tree = val getter = findDefaultGetter(fn, n, testOnly) @@ -347,9 +334,8 @@ object Applications { val flags2 = sym1.flags | NonMember // ensures Select typing doesn't let TermRef#withPrefix revert the type val sym2 = sym1.copy(info = methType, flags = flags2) // symbol not entered, to avoid overload resolution problems fun.withType(sym2.termRef) -} -trait Applications extends Compatibility { +trait Applications extends Compatibility: self: Typer & Dynamic => import Applications._ @@ -362,7 +348,7 @@ trait Applications extends Compatibility { * @param args the arguments of the application * @param resultType the expected result type of the application */ - abstract class Application[Arg](methRef: TermRef, funType: Type, args: List[Arg], resultType: Type)(using Context) { + abstract class Application[Arg](methRef: TermRef, funType: Type, args: List[Arg], resultType: Type)(using Context): /** The type of typed arguments: either tpd.Tree or Type */ type TypedArg @@ -417,8 +403,8 @@ trait Applications extends Compatibility { /** Whether `liftFun` is needed? It is the case if default arguments are used. 
*/ - protected def needLiftFun: Boolean = { - def requiredArgNum(tp: Type): Int = tp.widen match { + protected def needLiftFun: Boolean = + def requiredArgNum(tp: Type): Int = tp.widen match case funType: MethodType => val paramInfos = funType.paramInfos val argsNum = paramInfos.size @@ -429,11 +415,9 @@ trait Applications extends Compatibility { argsNum case funType: PolyType => requiredArgNum(funType.resultType) case tp => args.size - } !isJavaAnnotConstr(methRef.symbol) && args.size < requiredArgNum(funType) - } /** A flag signalling that the typechecking the application was so far successful */ private var _ok = true @@ -444,24 +428,20 @@ trait Applications extends Compatibility { /** The function's type after widening and instantiating polytypes * with TypeParamRefs in constraint set */ - @threadUnsafe lazy val methType: Type = { - def rec(t: Type): Type = { - t.widen match{ + @threadUnsafe lazy val methType: Type = + def rec(t: Type): Type = + t.widen match case funType: MethodType => funType case funType: PolyType => rec(instantiateWithTypeVars(funType)) case tp => tp - } - } rec(liftedFunType) - } @threadUnsafe lazy val liftedFunType: Type = - if (needLiftFun) { + if (needLiftFun) liftFun() normalizedFun.tpe - } else funType /** The arguments re-ordered so that each named argument matches the @@ -473,7 +453,7 @@ trait Applications extends Compatibility { else args - protected def init(): Unit = methType match { + protected def init(): Unit = methType match case methType: MethodType => val resultApprox = resultTypeApprox(methType) val sym = methRef.symbol @@ -493,7 +473,6 @@ trait Applications extends Compatibility { case _ => if (methType.isError) ok = false else fail(em"$methString does not take parameters") - } /** The application was successful */ def success: Boolean = ok @@ -504,7 +483,7 @@ trait Applications extends Compatibility { i"${err.refStr(methRef)}$infoStr" /** Re-order arguments to correctly align named arguments */ - def reorder[T <: 
Untyped](args: List[Trees.Tree[T]]): List[Trees.Tree[T]] = { + def reorder[T <: Untyped](args: List[Trees.Tree[T]]): List[Trees.Tree[T]] = /** @param pnames The list of parameter names that are missing arguments * @param args The list of arguments that are not yet passed, or that are waiting to be dropped @@ -517,19 +496,19 @@ trait Applications extends Compatibility { * 2. For every `(name -> arg)` in `nameToArg`, `arg` is an element of `args` */ def handleNamed(pnames: List[Name], args: List[Trees.Tree[T]], - nameToArg: Map[Name, Trees.NamedArg[T]], toDrop: Set[Name]): List[Trees.Tree[T]] = pnames match { + nameToArg: Map[Name, Trees.NamedArg[T]], toDrop: Set[Name]): List[Trees.Tree[T]] = pnames match case pname :: pnames1 if nameToArg contains pname => // there is a named argument for this parameter; pick it nameToArg(pname) :: handleNamed(pnames1, args, nameToArg - pname, toDrop + pname) case _ => def pnamesRest = if (pnames.isEmpty) pnames else pnames.tail - args match { + args match case (arg @ NamedArg(aname, _)) :: args1 => if (toDrop contains aname) // argument is already passed handleNamed(pnames, args1, nameToArg, toDrop - aname) else if ((nameToArg contains aname) && pnames.nonEmpty) // argument is missing, pass an empty tree genericEmptyTree :: handleNamed(pnames.tail, args, nameToArg, toDrop) - else { // name not (or no longer) available for named arg + else // name not (or no longer) available for named arg def msg = if (methodType.paramNames contains aname) em"parameter $aname of $methString is already instantiated" @@ -537,27 +516,22 @@ trait Applications extends Compatibility { em"$methString does not have a parameter $aname" fail(msg, arg.asInstanceOf[Arg]) arg :: handleNamed(pnamesRest, args1, nameToArg, toDrop) - } case arg :: args1 => arg :: handleNamed(pnamesRest, args1, nameToArg, toDrop) // unnamed argument; pick it case Nil => // no more args, continue to pick up any preceding named args if (pnames.isEmpty) Nil else handleNamed(pnamesRest, 
args, nameToArg, toDrop) - } - } def handlePositional(pnames: List[Name], args: List[Trees.Tree[T]]): List[Trees.Tree[T]] = - args match { + args match case (arg: NamedArg @unchecked) :: _ => val nameAssocs = for (case arg @ NamedArg(name, _) <- args) yield (name, arg) handleNamed(pnames, args, nameAssocs.toMap, Set()) case arg :: args1 => arg :: handlePositional(if (pnames.isEmpty) Nil else pnames.tail, args1) case Nil => Nil - } handlePositional(methodType.paramNames, args) - } /** Is `sym` a constructor of a Java-defined annotation? */ def isJavaAnnotConstr(sym: Symbol): Boolean = @@ -567,7 +541,7 @@ trait Applications extends Compatibility { * @param n The position of the first parameter in formals in `methType`. */ def matchArgs(args: List[Arg], formals: List[Type], n: Int): Unit = - if (success) formals match { + if (success) formals match case formal :: formals1 => def checkNoVarArg(arg: Arg) = @@ -596,12 +570,12 @@ trait Applications extends Compatibility { def missingArg(n: Int): Unit = fail(MissingArgument(methodType.paramNames(n), methString)) - def tryDefault(n: Int, args1: List[Arg]): Unit = { + def tryDefault(n: Int, args1: List[Arg]): Unit = val sym = methRef.symbol val testOnly = this.isInstanceOf[TestApplication[?]] val defaultArg = - if (isJavaAnnotConstr(sym)) { + if (isJavaAnnotConstr(sym)) val cinfo = sym.owner.asClass.classInfo val pname = methodType.paramNames(n) val hasDefault = cinfo.member(pname) @@ -613,7 +587,6 @@ trait Applications extends Compatibility { tpd.Underscore(formal) else EmptyTree - } else defaultArgument(normalizedFun, n, testOnly) def implicitArg = implicitArgTree(formal, appPos.span) @@ -626,10 +599,9 @@ trait Applications extends Compatibility { matchArgs(args1, addTyped(treeToArg(implicitArg)), n + 1) else missingArg(n) - } if (formal.isRepeatedParam) - args match { + args match case arg :: Nil if isVarArg(arg) => addTyped(arg) case (arg @ Typed(Literal(Constant(null)), _)) :: Nil if ctx.isAfterTyper => @@ -637,26 
+609,23 @@ trait Applications extends Compatibility { case _ => val elemFormal = formal.widenExpr.argTypesLo.head val typedArgs = - harmonic(harmonizeArgs, elemFormal) { + harmonic(harmonizeArgs, elemFormal): args.map { arg => checkNoVarArg(arg) typedArg(arg, elemFormal) } - } typedArgs.foreach(addArg(_, elemFormal)) makeVarArg(args.length, elemFormal) - } - else args match { + else args match case EmptyTree :: args1 => tryDefault(n, args1) case arg :: args1 => matchArgs(args1, addTyped(arg), n + 1) case nil => tryDefault(n, args) - } case nil => - args match { + args match case arg :: args1 => def msg = arg match case untpd.Tuple(Nil) @@ -666,9 +635,6 @@ trait Applications extends Compatibility { em"too many arguments for $methString" fail(msg, arg) case nil => - } - } - } /** The degree to which an argument has to match a formal parameter */ enum ArgMatch: @@ -680,7 +646,7 @@ trait Applications extends Compatibility { * issue error messages. */ abstract class TestApplication[Arg](methRef: TermRef, funType: Type, args: List[Arg], resultType: Type, argMatch: ArgMatch)(using Context) - extends Application[Arg](methRef, funType, args, resultType) { + extends Application[Arg](methRef, funType, args, resultType): type TypedArg = Arg type Result = Unit @@ -738,13 +704,12 @@ trait Applications extends Compatibility { def appPos: SrcPos = NoSourcePosition @threadUnsafe lazy val normalizedFun: Tree = ref(methRef, needLoad = false) init() - } /** Subclass of Application for applicability tests with type arguments and value * argument trees. 
*/ class ApplicableToTrees(methRef: TermRef, args: List[Tree], resultType: Type, argMatch: ArgMatch)(using Context) - extends TestApplication(methRef, methRef.widen, args, resultType, argMatch) { + extends TestApplication(methRef, methRef.widen, args, resultType, argMatch): def argType(arg: Tree, formal: Type): Type = if untpd.isContextualClosure(arg) && defn.isContextFunctionType(formal) then arg.tpe else normalize(arg.tpe, formal) @@ -752,17 +717,15 @@ trait Applications extends Compatibility { def isVarArg(arg: Tree): Boolean = tpd.isWildcardStarArg(arg) def typeOfArg(arg: Tree): Type = arg.tpe def harmonizeArgs(args: List[Tree]): List[Tree] = harmonize(args) - } /** Subclass of Application for applicability tests with value argument types. */ class ApplicableToTypes(methRef: TermRef, args: List[Type], resultType: Type, argMatch: ArgMatch)(using Context) - extends TestApplication(methRef, methRef, args, resultType, argMatch) { + extends TestApplication(methRef, methRef, args, resultType, argMatch): def argType(arg: Type, formal: Type): Type = arg def treeToArg(arg: Tree): Type = arg.tpe def isVarArg(arg: Type): Boolean = arg.isRepeatedParam def typeOfArg(arg: Type): Type = arg def harmonizeArgs(args: List[Type]): List[Type] = harmonizeTypes(args) - } /** Subclass of Application for type checking an Apply node, where * types of arguments are either known or unknown. 
@@ -770,7 +733,7 @@ trait Applications extends Compatibility { abstract class TypedApply[T <: Untyped]( app: untpd.Apply, fun: Tree, methRef: TermRef, args: List[Trees.Tree[T]], resultType: Type, override val applyKind: ApplyKind)(using Context) - extends Application(methRef, fun.tpe, args, resultType) { + extends Application(methRef, fun.tpe, args, resultType): type TypedArg = Tree def isVarArg(arg: Trees.Tree[T]): Boolean = untpd.isWildcardStarArg(arg) private var typedArgBuf = new mutable.ListBuffer[Tree] @@ -781,12 +744,11 @@ trait Applications extends Compatibility { def addArg(arg: Tree, formal: Type): Unit = typedArgBuf += adapt(arg, formal.widenExpr) - def makeVarArg(n: Int, elemFormal: Type): Unit = { + def makeVarArg(n: Int, elemFormal: Type): Unit = val args = typedArgBuf.takeRight(n).toList typedArgBuf.dropRightInPlace(n) val elemtpt = TypeTree(elemFormal) typedArgBuf += seqToRepeated(SeqLiteral(args, elemtpt)) - } def harmonizeArgs(args: List[TypedArg]): List[Tree] = // harmonize args only if resType depends on parameter types @@ -795,15 +757,13 @@ trait Applications extends Compatibility { override def appPos: SrcPos = app.srcPos - def fail(msg: Message, arg: Trees.Tree[T]): Unit = { + def fail(msg: Message, arg: Trees.Tree[T]): Unit = report.error(msg, arg.srcPos) ok = false - } - def fail(msg: Message): Unit = { + def fail(msg: Message): Unit = report.error(msg, app.srcPos) ok = false - } def normalizedFun: Tree = myNormalizedFun @@ -811,26 +771,22 @@ trait Applications extends Compatibility { if (methRef.symbol.hasDefaultParams) LiftComplex else LiftImpure override def liftFun(): Unit = - if (liftedDefs == null) { + if (liftedDefs == null) liftedDefs = new mutable.ListBuffer[Tree] myNormalizedFun = lifter.liftApp(liftedDefs.uncheckedNN, myNormalizedFun) - } /** The index of the first difference between lists of trees `xs` and `ys` * -1 if there are no differences. 
*/ - private def firstDiff[T <: Trees.Tree[?]](xs: List[T], ys: List[T], n: Int = 0): Int = xs match { + private def firstDiff[T <: Trees.Tree[?]](xs: List[T], ys: List[T], n: Int = 0): Int = xs match case x :: xs1 => - ys match { + ys match case y :: ys1 => if (x ne y) n else firstDiff(xs1, ys1, n + 1) case nil => n - } case nil => - ys match { + ys match case y :: ys1 => n case nil => -1 - } - } private def sameSeq[T <: Trees.Tree[?]](xs: List[T], ys: List[T]): Boolean = firstDiff(xs, ys) < 0 /** An argument is safe if it is a pure expression or a default getter call @@ -840,12 +796,12 @@ trait Applications extends Compatibility { isPureExpr(arg) || arg.isInstanceOf[RefTree | Apply | TypeApply] && arg.symbol.name.is(DefaultGetterName) - val result: Tree = { + val result: Tree = var typedArgs = typedArgBuf.toList def app0 = cpy.Apply(app)(normalizedFun, typedArgs) // needs to be a `def` because typedArgs can change later val app1 = if (!success) app0.withType(UnspecifiedErrorType) - else { + else if !sameSeq(args, orderedArgs) && !isJavaAnnotConstr(methRef.symbol) && !typedArgs.forall(isSafeArg) @@ -860,15 +816,13 @@ trait Applications extends Compatibility { typedArgs = lifter.liftArgs(argDefBuf, methType, typedArgs) // Lifted arguments ordered based on the original order of typedArgBuf and // with all non-explicit default parameters at the end in declaration order. 
- val orderedArgDefs = { + val orderedArgDefs = // Indices of original typed arguments that are lifted by liftArgs - val impureArgIndices = typedArgBuf.zipWithIndex.collect { + val impureArgIndices = typedArgBuf.zipWithIndex.collect: case (arg, idx) if !lifter.noLift(arg) => idx - } - def position(arg: Trees.Tree[T]) = { + def position(arg: Trees.Tree[T]) = val i = args.indexOf(arg) if (i >= 0) i else orderedArgs.length - } // The original indices of all ordered arguments, as an array val originalIndices = orderedArgs.map(position).toArray // Assuming stable sorting all non-explicit default parameters will remain in the end with the same order @@ -878,36 +832,30 @@ trait Applications extends Compatibility { if (n < originalIndices.length) originalIndices(n) else orderedArgs.length scala.util.Sorting.stableSort[(Tree, Int), Int]( argDefBuf.zip(impureArgIndices), (arg, idx) => originalIndex(idx)).map(_._1) - } liftedDefs.nn ++= orderedArgDefs end if if (sameSeq(typedArgs, args)) // trick to cut down on tree copying typedArgs = args.asInstanceOf[List[Tree]] assignType(app0, normalizedFun, typedArgs) - } wrapDefs(liftedDefs, app1) - } - } /** Subclass of Application for type checking an Apply node with untyped arguments. */ class ApplyToUntyped( app: untpd.Apply, fun: Tree, methRef: TermRef, proto: FunProto, resultType: Type)(using Context) - extends TypedApply(app, fun, methRef, proto.args, resultType, proto.applyKind) { + extends TypedApply(app, fun, methRef, proto.args, resultType, proto.applyKind): def typedArg(arg: untpd.Tree, formal: Type): TypedArg = proto.typedArg(arg, formal) def treeToArg(arg: Tree): untpd.Tree = untpd.TypedSplice(arg) def typeOfArg(arg: untpd.Tree): Type = proto.typeOfArg(arg) - } /** Subclass of Application for type checking an Apply node with typed arguments. 
*/ class ApplyToTyped( app: untpd.Apply, fun: Tree, methRef: TermRef, args: List[Tree], resultType: Type, applyKind: ApplyKind)(using Context) - extends TypedApply(app, fun, methRef, args, resultType, applyKind) { + extends TypedApply(app, fun, methRef, args, resultType, applyKind): def typedArg(arg: Tree, formal: Type): TypedArg = arg def treeToArg(arg: Tree): Tree = arg def typeOfArg(arg: Tree): Type = arg.tpe - } /** If `app` is a `this(...)` constructor call, the this-call argument context, * otherwise the current context. @@ -920,9 +868,9 @@ trait Applications extends Compatibility { * or, if application is an operator assignment, also an `Assign` or * Block node. */ - def typedApply(tree: untpd.Apply, pt: Type)(using Context): Tree = { + def typedApply(tree: untpd.Apply, pt: Type)(using Context): Tree = - def realApply(using Context): Tree = { + def realApply(using Context): Tree = val resultProto = tree.fun match case Select(New(tpt), _) if pt.isInstanceOf[ValueType] => if tpt.isType && typedAheadType(tpt).tpe.typeSymbol.typeParams.isEmpty then @@ -949,7 +897,7 @@ trait Applications extends Compatibility { /** Type application where arguments come from prototype, and no implicits are inserted */ def simpleApply(fun1: Tree, proto: FunProto)(using Context): Tree = - methPart(fun1).tpe match { + methPart(fun1).tpe match case funRef: TermRef if funRef.symbol.isSignaturePolymorphic => // synthesize a method type based on the types at the call site. // one can imagine the original signature-polymorphic method as @@ -973,7 +921,6 @@ trait Applications extends Compatibility { pt)) case _ => handleUnexpectedFunType(tree, fun1) - } /** Try same application with an implicit inserted around the qualifier of the function * part. Return an optional value to indicate success. 
@@ -992,10 +939,10 @@ trait Applications extends Compatibility { } } - fun1.tpe match { + fun1.tpe match case err: ErrorType => cpy.Apply(tree)(fun1, proto.typedArgs()).withType(err) case TryDynamicCallType => - val isInsertedApply = fun1 match { + val isInsertedApply = fun1 match case Select(_, nme.apply) => fun1.span.isSynthetic case TypeApply(sel @ Select(_, nme.apply), _) => sel.span.isSynthetic /* TODO Get rid of this case. It is still syntax-based, therefore unreliable. @@ -1009,7 +956,6 @@ trait Applications extends Compatibility { */ case TypeApply(fun, _) => !fun.isInstanceOf[Select] case _ => false - } val tree1 = fun1 match case Select(_, nme.apply) => tree case _ => untpd.Apply(fun1, tree.args) @@ -1034,19 +980,17 @@ trait Applications extends Compatibility { // To make this work using regular inlining, we'd need a way to annotate // an inline function that it should expand only if there are no enclosing // applications of inline functions. - tree.args match { + tree.args match case (arg @ Match(EmptyTree, cases)) :: Nil => - cases.foreach { + cases.foreach: case CaseDef(Typed(_: untpd.Ident, _), _, _) => // OK case CaseDef(Bind(_, Typed(_: untpd.Ident, _)), _, _) => // OK case CaseDef(Ident(name), _, _) if name == nme.WILDCARD => // Ok case CaseDef(pat, _, _) => report.error(UnexpectedPatternForSummonFrom(pat), pat.srcPos) - } typed(untpd.InlineMatch(EmptyTree, cases).withSpan(tree.span), pt) case _ => errorTree(tree, em"argument to summonFrom must be a pattern matching closure") - } else tryEither { simpleApply(fun1, proto) @@ -1061,8 +1005,6 @@ trait Applications extends Compatibility { if (proto eq originalProto) fail else tryWithImplicitOnQualifier(fun1, proto).getOrElse(fail)) } - } - } /** Convert expression like * @@ -1072,7 +1014,7 @@ trait Applications extends Compatibility { * * { val xs = es; e' = e' + args } */ - def typedOpAssign(using Context): Tree = { + def typedOpAssign(using Context): Tree = val (lhs1, name, rhss) = (tree: @unchecked) 
match case Apply(Select(lhs, name), rhss) => (typedExpr(lhs), name, rhss) case Apply(untpd.TypedSplice(Select(lhs1, name)), rhss) => (lhs1, name, rhss) @@ -1081,7 +1023,6 @@ trait Applications extends Compatibility { val assign = untpd.Assign(lhs2, untpd.Apply(untpd.Select(lhs2, name.asSimpleName.dropRight(1)), rhss)) wrapDefs(liftedDefs, typed(assign)) - } val app1 = if (untpd.isOpAssign(tree)) @@ -1095,26 +1036,22 @@ trait Applications extends Compatibility { failedVal } } - else { + else val app = tree.fun match case _: untpd.SplicePattern => typedAppliedSplice(tree, pt) case _ => realApply - app match { + app match case Apply(fn @ Select(left, _), right :: Nil) if fn.hasType => val op = fn.symbol if (op == defn.Any_== || op == defn.Any_!=) checkCanEqual(left.tpe.widen, right.tpe.widen, app.span) case _ => - } app - } - app1 match { + app1 match case Apply(Block(stats, fn), args) => tpd.cpy.Block(app1)(stats, tpd.cpy.Apply(app1)(fn, args)) case _ => app1 - } - } /** Typecheck an Apply node with a typed function and possibly-typed arguments coming from `proto` */ def ApplyTo(app: untpd.Apply, fun: tpd.Tree, methRef: TermRef, proto: FunProto, resultType: Type)(using Context): tpd.Tree = @@ -1133,7 +1070,7 @@ trait Applications extends Compatibility { throw Error(i"unexpected type.\n fun = $fun,\n methPart(fun) = ${methPart(fun)},\n methPart(fun).tpe = ${methPart(fun).tpe},\n tpe = ${fun.tpe}") def typedNamedArgs(args: List[untpd.Tree])(using Context): List[NamedArg] = - for (case arg @ NamedArg(id, argtpt) <- args) yield { + for (case arg @ NamedArg(id, argtpt) <- args) yield if !Feature.namedTypeArgsEnabled then report.error( em"""Named type arguments are experimental, @@ -1141,16 +1078,15 @@ trait Applications extends Compatibility { arg.srcPos) val argtpt1 = typedType(argtpt) cpy.NamedArg(arg)(id, argtpt1).withType(argtpt1.tpe) - } - def typedTypeApply(tree: untpd.TypeApply, pt: Type)(using Context): Tree = { + def typedTypeApply(tree: untpd.TypeApply, pt: 
Type)(using Context): Tree = if (ctx.mode.is(Mode.Pattern)) return errorTree(tree, em"invalid pattern") val isNamed = hasNamedArg(tree.args) val typedArgs = if (isNamed) typedNamedArgs(tree.args) else tree.args.mapconserve(typedType(_)) record("typedTypeApply") - typedExpr(tree.fun, PolyProto(typedArgs, pt)) match { + typedExpr(tree.fun, PolyProto(typedArgs, pt)) match case fun: TypeApply if !ctx.isAfterTyper => val function = fun.fun val args = (fun.args ++ tree.args).map(_.show).mkString(", ") @@ -1158,30 +1094,25 @@ trait Applications extends Compatibility { |You might have meant something like: |${function}[${args}]""") case typedFn => - typedFn.tpe.widen match { + typedFn.tpe.widen match case pt: PolyType => if (typedArgs.length <= pt.paramInfos.length && !isNamed) - if (typedFn.symbol == defn.Predef_classOf && typedArgs.nonEmpty) { + if (typedFn.symbol == defn.Predef_classOf && typedArgs.nonEmpty) val arg = typedArgs.head if (!arg.symbol.is(Module)) // Allow `classOf[Foo.type]` if `Foo` is an object checkClassType(arg.tpe, arg.srcPos, traitReq = false, stablePrefixReq = false) - } case _ => - } - def tryDynamicTypeApply(): Tree = typedFn match { + def tryDynamicTypeApply(): Tree = typedFn match case typedFn: Select if !pt.isInstanceOf[FunProto] => typedDynamicSelect(typedFn, typedArgs.map(untpd.TypedSplice(_)), pt) case _ => tree.withType(TryDynamicCallType) - } if (typedFn.tpe eq TryDynamicCallType) tryDynamicTypeApply() else assignType(cpy.TypeApply(tree)(typedFn, typedArgs), typedFn, typedArgs) - } - } /** Rewrite `new Array[T](....)` if T is an unbounded generic to calls to newGenericArray. * It is performed during typer as creation of generic arrays needs a classTag. * we rely on implicit search to find one. 
*/ - def convertNewGenericArray(tree: Tree)(using Context): Tree = tree match { + def convertNewGenericArray(tree: Tree)(using Context): Tree = tree match case Apply(TypeApply(tycon, targs@(targ :: Nil)), args) if tycon.symbol == defn.ArrayConstructor => fullyDefinedType(tree.tpe, "array", tree.srcPos) @@ -1195,22 +1126,19 @@ trait Applications extends Compatibility { else tree case _ => tree - } /** Is `tp` a unary function type or an overloaded type with with only unary function * types as alternatives? */ - def isUnary(tp: Type)(using Context): Boolean = tp match { + def isUnary(tp: Type)(using Context): Boolean = tp match case tp: MethodicType => - tp.firstParamTypes match { + tp.firstParamTypes match case ptype :: Nil => !ptype.isRepeatedParam case _ => false - } case tp: TermRef => tp.denot.alternatives.forall(alt => isUnary(alt.info)) case _ => false - } /** Should we tuple or untuple the argument before application? * If auto-tupling is enabled then @@ -1266,7 +1194,7 @@ trait Applications extends Compatibility { case _ => false case _ => false - def typedUnApply(tree: untpd.Apply, selType: Type)(using Context): Tree = { + def typedUnApply(tree: untpd.Apply, selType: Type)(using Context): Tree = record("typedUnApply") val Apply(qual, args) = tree @@ -1292,24 +1220,20 @@ trait Applications extends Compatibility { * If this refers to a type alias, follow the alias, and if * one finds a class, reference the class companion module. 
*/ - def followTypeAlias(tree: untpd.Tree): untpd.Tree = { - tree match { + def followTypeAlias(tree: untpd.Tree): untpd.Tree = + tree match case tree: untpd.RefTree => val nestedCtx = ctx.fresh.setNewTyperState() val ttree = typedType(untpd.rename(tree, tree.name.toTypeName))(using nestedCtx) - ttree.tpe match { + ttree.tpe match case alias: TypeRef if alias.info.isTypeAlias && !nestedCtx.reporter.hasErrors => - Inferencing.companionRef(alias) match { + Inferencing.companionRef(alias) match case companion: TermRef => return untpd.ref(companion).withSpan(tree.span) case _ => - } case _ => - } case _ => - } untpd.EmptyTree - } /** A typed qual.unapply or qual.unapplySeq tree, if this typechecks. * Otherwise fallBack with (maltyped) qual.unapply as argument @@ -1321,7 +1245,7 @@ trait Applications extends Compatibility { * overloaded unapply does *not* need to be applicable to its argument * whereas overloaded variants need to have a conforming variant. */ - def trySelectUnapply(qual: untpd.Tree)(fallBack: (Tree, TyperState) => Tree): Tree = { + def trySelectUnapply(qual: untpd.Tree)(fallBack: (Tree, TyperState) => Tree): Tree = // try first for non-overloaded, then for overloaded occurrences def tryWithName(name: TermName)(fallBack: (Tree, TyperState) => Tree)(using Context): Tree = @@ -1360,49 +1284,42 @@ trait Applications extends Compatibility { end tryWithName // try first for unapply, then for unapplySeq - tryWithName(nme.unapply) { + tryWithName(nme.unapply): (sel, state) => - tryWithName(nme.unapplySeq) { + tryWithName(nme.unapplySeq): (sel2, state2) => // if both fail, return unapply error, unless that is simply a // "not a member", and the unapplySeq error is more refined. if saysNotFound(state, nme.unapply) && !saysNotFound(state2, nme.unapplySeq) then fallBack(sel2, state2) else fallBack(sel, state) - } - } - } /** Produce a typed qual.unapply or qual.unapplySeq tree, or * else if this fails follow a type alias and try again. 
*/ var unapplyFn = - trySelectUnapply(qual) { + trySelectUnapply(qual): (sel, state) => val qual1 = followTypeAlias(qual) if (qual1.isEmpty) reportErrors(sel, state) - else trySelectUnapply(qual1) { + else trySelectUnapply(qual1): (_, state) => reportErrors(sel, state) - } - } /** Add a `Bind` node for each `bound` symbol in a type application `unapp` */ - def addBinders(unapp: Tree, bound: List[Symbol]) = unapp match { + def addBinders(unapp: Tree, bound: List[Symbol]) = unapp match case TypeApply(fn, args) => var remain = bound.toSet - def addBinder(arg: Tree) = arg.tpe.stripTypeVar match { + def addBinder(arg: Tree) = arg.tpe.stripTypeVar match case ref: TypeRef if remain.contains(ref.symbol) => remain -= ref.symbol tpd.Bind(ref.symbol, Ident(ref)) case _ => arg - } tpd.cpy.TypeApply(unapp)(fn, args.mapConserve(addBinder)) case _ => unapp - } - unapplyFn.tpe.widen match { + unapplyFn.tpe.widen match case mt: MethodType if mt.paramInfos.length == 1 => val unapplyArgType = mt.paramInfos.head unapp.println(i"unapp arg tpe = $unapplyArgType, pt = $selType") @@ -1425,33 +1342,29 @@ trait Applications extends Compatibility { val dummyArg = dummyTreeOfType(ownType) val unapplyApp = typedExpr(untpd.TypedSplice(Apply(unapplyFn, dummyArg :: Nil))) - def unapplyImplicits(unapp: Tree): List[Tree] = { + def unapplyImplicits(unapp: Tree): List[Tree] = val res = List.newBuilder[Tree] - def loop(unapp: Tree): Unit = unapp match { + def loop(unapp: Tree): Unit = unapp match case Apply(Apply(unapply, `dummyArg` :: Nil), args2) => assert(args2.nonEmpty); res ++= args2 case Apply(unapply, `dummyArg` :: Nil) => case Inlined(u, _, _) => loop(u) case DynamicUnapply(_) => report.error(em"Structural unapply is not supported", unapplyFn.srcPos) case Apply(fn, args) => assert(args.nonEmpty); loop(fn); res ++= args case _ => ().assertingErrorsReported - } loop(unapp) res.result() - } var argTypes = unapplyArgs(unapplyApp.tpe, unapplyFn, args, tree.srcPos) for (argType <- argTypes) 
assert(!isBounds(argType), unapplyApp.tpe.show) - val bunchedArgs = argTypes match { + val bunchedArgs = argTypes match case argType :: Nil => if (args.lengthCompare(1) > 0 && Feature.autoTuplingEnabled && defn.isTupleNType(argType)) untpd.Tuple(args) :: Nil else args case _ => args - } - if (argTypes.length != bunchedArgs.length) { + if (argTypes.length != bunchedArgs.length) report.error(UnapplyInvalidNumberOfArguments(qual, argTypes), tree.srcPos) argTypes = argTypes.take(args.length) ++ List.fill(argTypes.length - args.length)(WildcardType) - } val unapplyPatterns = bunchedArgs.lazyZip(argTypes) map (typed(_, _)) val result = assignType(cpy.UnApply(tree)(unapplyFn, unapplyImplicits(unapplyApp), unapplyPatterns), ownType) unapp.println(s"unapply patterns = $unapplyPatterns") @@ -1461,8 +1374,6 @@ trait Applications extends Compatibility { val unapplyErr = if (tp.isError) unapplyFn else notAnExtractor(unapplyFn) val typedArgsErr = args mapconserve (typed(_, defn.AnyType)) cpy.UnApply(tree)(unapplyErr, Nil, typedArgsErr) withType unapplyErr.tpe - } - } /** A typed unapply hook, can be overridden by re any-typers between frontend * and pattern matcher. @@ -1473,11 +1384,10 @@ trait Applications extends Compatibility { /** Is given method reference applicable to argument trees `args`? * @param resultType The expected result type of the application */ - def isApplicableMethodRef(methRef: TermRef, args: List[Tree], resultType: Type, keepConstraint: Boolean, argMatch: ArgMatch)(using Context): Boolean = { + def isApplicableMethodRef(methRef: TermRef, args: List[Tree], resultType: Type, keepConstraint: Boolean, argMatch: ArgMatch)(using Context): Boolean = def isApp(using Context): Boolean = new ApplicableToTrees(methRef, args, resultType, argMatch).success if (keepConstraint) isApp else explore(isApp) - } /** Is given method reference applicable to argument types `args`? 
* @param resultType The expected result type of the application @@ -1489,54 +1399,48 @@ trait Applications extends Compatibility { * @param resultType The expected result type of the application */ def isApplicableType(tp: Type, args: List[Tree], resultType: Type, keepConstraint: Boolean)(using Context): Boolean = - onMethod(tp, args.nonEmpty) { + onMethod(tp, args.nonEmpty): isApplicableMethodRef(_, args, resultType, keepConstraint, ArgMatch.Compatible) - } /** Is given type applicable to argument types `args`, possibly after inserting an `apply`? * @param resultType The expected result type of the application */ def isApplicableType(tp: Type, args: List[Type], resultType: Type)(using Context): Boolean = - onMethod(tp, args.nonEmpty) { + onMethod(tp, args.nonEmpty): isApplicableMethodRef(_, args, resultType, ArgMatch.Compatible) - } - private def onMethod(tp: Type, followApply: Boolean)(p: TermRef => Boolean)(using Context): Boolean = tp match { + private def onMethod(tp: Type, followApply: Boolean)(p: TermRef => Boolean)(using Context): Boolean = tp match case methRef: TermRef if methRef.widenSingleton.isInstanceOf[MethodicType] => p(methRef) case mt: MethodicType => p(mt.narrow) case _ => followApply && tp.member(nme.apply).hasAltWith(d => p(TermRef(tp, nme.apply, d))) - } /** Does `tp` have an extension method named `xname` with this-argument `argType` and * result matching `resultType`? 
*/ - def hasExtensionMethodNamed(tp: Type, xname: TermName, argType: Type, resultType: Type)(using Context) = { + def hasExtensionMethodNamed(tp: Type, xname: TermName, argType: Type, resultType: Type)(using Context) = def qualifies(mbr: Denotation) = mbr.exists && isApplicableType( normalize(tp.select(xname, mbr), WildcardType), argType :: Nil, resultType) - tp.memberBasedOnFlags(xname, required = ExtensionMethod) match { + tp.memberBasedOnFlags(xname, required = ExtensionMethod) match case mbr: SingleDenotation => qualifies(mbr) case mbr => mbr.hasAltWith(qualifies(_)) - } - } /** Drop any leading type or implicit parameter sections */ - def stripInferrable(tp: Type)(using Context): Type = tp match { + def stripInferrable(tp: Type)(using Context): Type = tp match case mt: MethodType if mt.isImplicitMethod => stripInferrable(resultTypeApprox(mt)) case pt: PolyType => stripInferrable(pt.resType) case _ => tp - } /** Drop any leading implicit parameter sections */ - def stripImplicit(tp: Type, wildcardOnly: Boolean = false)(using Context): Type = tp match { + def stripImplicit(tp: Type, wildcardOnly: Boolean = false)(using Context): Type = tp match case mt: MethodType if mt.isImplicitMethod => stripImplicit(resultTypeApprox(mt, wildcardOnly)) case pt: PolyType => @@ -1549,7 +1453,6 @@ trait Applications extends Compatibility { .asInstanceOf[PolyType].flatten case _ => tp - } /** Compare owner inheritance level. * @param sym1 The first owner @@ -1618,7 +1521,7 @@ trait Applications extends Compatibility { * an alternative that takes more implicit parameters wins over one * that takes fewer. 
*/ - def compare(alt1: TermRef, alt2: TermRef)(using Context): Int = trace(i"compare($alt1, $alt2)", overload) { + def compare(alt1: TermRef, alt2: TermRef)(using Context): Int = trace(i"compare($alt1, $alt2)", overload): record("resolveOverloaded.compare") /** Is alternative `alt1` with type `tp1` as specific as alternative @@ -1638,7 +1541,7 @@ trait Applications extends Compatibility { * b. as specific as a member of any other type `tp2` if `tp1` is compatible * with `tp2`. */ - def isAsSpecific(alt1: TermRef, tp1: Type, alt2: TermRef, tp2: Type): Boolean = trace(i"isAsSpecific $tp1 $tp2", overload) { + def isAsSpecific(alt1: TermRef, tp1: Type, alt2: TermRef, tp2: Type): Boolean = trace(i"isAsSpecific $tp1 $tp2", overload): tp1 match case tp1: MethodType => // (1) tp1.paramInfos.isEmpty && tp2.isInstanceOf[LambdaType] @@ -1650,7 +1553,7 @@ trait Applications extends Compatibility { isApplicableMethodRef(alt2, tp1.paramInfos, WildcardType, ArgMatch.Compatible) } case tp1: PolyType => // (2) - inContext(ctx.fresh.setExploreTyperState()) { + inContext(ctx.fresh.setExploreTyperState()): // Fully define the PolyType parameters so that the infos of the // tparams created below never contain TypeRefs whose underling types // contain uninstantiated TypeVars, this could lead to cycles in @@ -1661,7 +1564,6 @@ trait Applications extends Compatibility { val tparams = newTypeParams(alt1.symbol, tp1.paramNames, EmptyFlags, tp1.instantiateParamInfos(_)) isAsSpecific(alt1, tp1.instantiate(tparams.map(_.typeRef)), alt2, tp2) - } case _ => // (3) tp2 match case tp2: MethodType => true // (3a) @@ -1670,7 +1572,6 @@ trait Applications extends Compatibility { explore(isAsSpecificValueType(tp1, instantiateWithTypeVars(tp2))) case _ => // 3b) isAsSpecificValueType(tp1, tp2) - } /** Test whether value type `tp1` is as specific as value type `tp2`. * Let's abbreviate this to `tp1 <:s tp2`. 
@@ -1705,24 +1606,20 @@ trait Applications extends Compatibility { def isAsSpecificValueType(tp1: Type, tp2: Type)(using Context) = if (ctx.mode.is(Mode.OldOverloadingResolution)) isCompatible(tp1, tp2) - else { - val flip = new TypeMap { - def apply(t: Type) = t match { + else + val flip = new TypeMap: + def apply(t: Type) = t match case t @ AppliedType(tycon, args) => def mapArg(arg: Type, tparam: TypeParamInfo) = if (variance > 0 && tparam.paramVarianceSign < 0) defn.FunctionOf(arg :: Nil, defn.UnitType) else arg mapOver(t.derivedAppliedType(tycon, args.zipWithConserve(tycon.typeParams)(mapArg))) case _ => mapOver(t) - } - } - def prepare(tp: Type) = tp.stripTypeVar match { + def prepare(tp: Type) = tp.stripTypeVar match case tp: NamedType if tp.symbol.is(Module) && tp.symbol.sourceModule.is(Given) => flip(tp.widen.widenToParents) case _ => flip(tp) - } (prepare(tp1) relaxed_<:< prepare(tp2)) || viewExists(tp1, tp2) - } /** Widen the result type of synthetic given methods from the implementation class to the * type that's implemented. Example @@ -1743,7 +1640,7 @@ trait Applications extends Compatibility { * objects, since these are anyway taken to be more specific than methods * (by condition 3a above). 
*/ - def widenGiven(tp: Type, alt: TermRef): Type = tp match { + def widenGiven(tp: Type, alt: TermRef): Type = tp match case mt: MethodType if mt.isImplicitMethod => mt.derivedLambdaType(mt.paramNames, mt.paramInfos, widenGiven(mt.resultType, alt)) case pt: PolyType => @@ -1751,9 +1648,8 @@ trait Applications extends Compatibility { case rt => if alt.symbol.isCoDefinedGiven(rt.typeSymbol) then tp.widenToParents else tp - } - def compareWithTypes(tp1: Type, tp2: Type) = { + def compareWithTypes(tp1: Type, tp2: Type) = val ownerScore = compareOwner(alt1.symbol.maybeOwner, alt2.symbol.maybeOwner) def winsType1 = isAsSpecific(alt1, tp1, alt2, tp2) def winsType2 = isAsSpecific(alt2, tp2, alt1, tp1) @@ -1767,7 +1663,6 @@ trait Applications extends Compatibility { if (winsType2) 0 else 1 else if (winsType2) -1 else 0 - } if alt1.symbol.is(ConstructorProxy) && !alt2.symbol.is(ConstructorProxy) then -1 else if alt2.symbol.is(ConstructorProxy) && !alt1.symbol.is(ConstructorProxy) then 1 @@ -1784,40 +1679,33 @@ trait Applications extends Compatibility { else 1 // prefer 1st alternative with no implicits else if (strippedType2 eq fullType2) -1 // prefer 2nd alternative with no implicits else compareWithTypes(fullType1, fullType2) // continue by comparing implicits parameters - } end compare - def narrowMostSpecific(alts: List[TermRef])(using Context): List[TermRef] = { + def narrowMostSpecific(alts: List[TermRef])(using Context): List[TermRef] = record("narrowMostSpecific") - alts match { + alts match case Nil => alts case _ :: Nil => alts case alt1 :: alt2 :: Nil => - compare(alt1, alt2) match { + compare(alt1, alt2) match case 1 => alt1 :: Nil case -1 => alt2 :: Nil case 0 => alts - } case alt :: alts1 => - def survivors(previous: List[TermRef], alts: List[TermRef]): List[TermRef] = alts match { + def survivors(previous: List[TermRef], alts: List[TermRef]): List[TermRef] = alts match case alt :: alts1 => - compare(previous.head, alt) match { + compare(previous.head, alt) 
match case 1 => survivors(previous, alts1) case -1 => survivors(alt :: previous.tail, alts1) case 0 => survivors(alt :: previous, alts1) - } case Nil => previous - } val best :: rest = survivors(alt :: Nil, alts1): @unchecked - def asGood(alts: List[TermRef]): List[TermRef] = alts match { + def asGood(alts: List[TermRef]): List[TermRef] = alts match case alt :: alts1 => if (compare(alt, best) < 0) asGood(alts1) else alt :: asGood(alts1) case nil => Nil - } best :: asGood(rest) - } - } /** Resolve overloaded alternative `alts`, given expected type `pt`. * Two trials: First, without implicits or SAM conversions enabled. Then, @@ -1831,15 +1719,13 @@ trait Applications extends Compatibility { * is a `IgnoredProto`, pick the underlying type instead. */ def resultConforms(altSym: Symbol, altType: Type, resultType: Type)(using Context): Boolean = - resultType.revealIgnored match { + resultType.revealIgnored match case resultType: ValueType => - altType.widen match { + altType.widen match case tp: PolyType => resultConforms(altSym, instantiateWithTypeVars(tp), resultType) case tp: MethodType => constrainResult(altSym, tp.resultType, resultType) case _ => true - } case _ => true - } /** If the `chosen` alternative has a result type incompatible with the expected result * type `pt`, run overloading resolution again on all alternatives that do match `pt`. @@ -1853,21 +1739,18 @@ trait Applications extends Compatibility { * probability of pruning the search. result type comparisons are neither cheap nor * do they prune much, on average. 
*/ - def adaptByResult(chosen: TermRef, alts: List[TermRef]) = pt match { + def adaptByResult(chosen: TermRef, alts: List[TermRef]) = pt match case pt: FunProto if !explore(resultConforms(chosen.symbol, chosen, pt.resultType)) => val conformingAlts = alts.filterConserve(alt => (alt ne chosen) && explore(resultConforms(alt.symbol, alt, pt.resultType))) - conformingAlts match { + conformingAlts match case Nil => chosen case alt2 :: Nil => alt2 case alts2 => - resolveOverloaded(alts2, pt) match { + resolveOverloaded(alts2, pt) match case alt2 :: Nil => alt2 case _ => chosen - } - } case _ => chosen - } def resolve(alts: List[TermRef]): List[TermRef] = pt match @@ -1891,23 +1774,20 @@ trait Applications extends Compatibility { * - the result is applied to value arguments and alternative is not a method, or * - the result is applied to type arguments and alternative is not polymorphic */ - val tryApply: Type => Boolean = alt => pt match { + val tryApply: Type => Boolean = alt => pt match case pt: FunProto => !alt.widen.stripPoly.isInstanceOf[MethodType] case pt: PolyProto => !alt.widen.isInstanceOf[PolyType] case _ => false - } /** Replace each alternative by its apply members where necessary */ def applyMembers(alt: TermRef): List[TermRef] = - if (tryApply(alt)) { - val qual = alt.widen match { + if (tryApply(alt)) + val qual = alt.widen match case pt: PolyType => wildApprox(pt.resultType) case _ => alt - } qual.member(nme.apply).alternatives.map(TermRef(alt, nme.apply, _)) - } else alt :: Nil /** Fall back from an apply method to its original alternative */ @@ -1916,10 +1796,9 @@ trait Applications extends Compatibility { alts.find(_.symbol == alt.prefix.termSymbol).getOrElse(alt) else alt - if (alts.exists(tryApply)) { + if (alts.exists(tryApply)) val expanded = alts.flatMap(applyMembers) resolve(expanded).map(retract) - } else resolve(alts) end resolveOverloaded @@ -1929,36 +1808,34 @@ trait Applications extends Compatibility { * implicits and SAM conversions 
enabled, and once without. */ private def resolveOverloaded1(alts: List[TermRef], pt: Type)(using Context): List[TermRef] = - trace(i"resolve over $alts%, %, pt = $pt", typr, show = true) { - record(s"resolveOverloaded1", alts.length) + trace(i"resolve over $alts%, %, pt = $pt", typr, show = true): + record(s"resolveOverloaded1", alts.length) - def isDetermined(alts: List[TermRef]) = alts.isEmpty || alts.tail.isEmpty + def isDetermined(alts: List[TermRef]) = alts.isEmpty || alts.tail.isEmpty /** The shape of given tree as a type; cannot handle named arguments. */ - def typeShape(tree: untpd.Tree): Type = tree match { - case untpd.Function(args, body) => - defn.FunctionOf( + def typeShape(tree: untpd.Tree): Type = tree match + case untpd.Function(args, body) => + defn.FunctionOf( args.map(Function.const(defn.AnyType)), typeShape(body), isContextual = untpd.isContextualClosure(tree)) - case Match(EmptyTree, _) => - defn.PartialFunctionClass.typeRef.appliedTo(defn.AnyType :: defn.NothingType :: Nil) - case _ => - defn.NothingType - } + case Match(EmptyTree, _) => + defn.PartialFunctionClass.typeRef.appliedTo(defn.AnyType :: defn.NothingType :: Nil) + case _ => + defn.NothingType /** The shape of given tree as a type; is more expensive than * typeShape but can can handle named arguments. 
*/ - def treeShape(tree: untpd.Tree): Tree = tree match { - case NamedArg(name, arg) => - val argShape = treeShape(arg) - cpy.NamedArg(tree)(name, argShape).withType(argShape.tpe) - case _ => - dummyTreeOfType(typeShape(tree)) - } + def treeShape(tree: untpd.Tree): Tree = tree match + case NamedArg(name, arg) => + val argShape = treeShape(arg) + cpy.NamedArg(tree)(name, argShape).withType(argShape.tpe) + case _ => + dummyTreeOfType(typeShape(tree)) - def narrowByTypes(alts: List[TermRef], argTypes: List[Type], resultType: Type): List[TermRef] = - alts.filterConserve(isApplicableMethodRef(_, argTypes, resultType, ArgMatch.CompatibleCAP)) + def narrowByTypes(alts: List[TermRef], argTypes: List[Type], resultType: Type): List[TermRef] = + alts.filterConserve(isApplicableMethodRef(_, argTypes, resultType, ArgMatch.CompatibleCAP)) /** Normalization steps before checking arguments: * @@ -1968,106 +1845,105 @@ trait Applications extends Compatibility { * is an n-ary function, and at least one alternative has a corresponding * formal parameter that is a unary function. */ - def normArg(alts: List[TermRef], arg: untpd.Tree, idx: Int): untpd.Tree = arg match - case Block(Nil, expr) if !expr.isEmpty => normArg(alts, expr, idx) - case untpd.Function(args: List[untpd.ValDef] @unchecked, body) => + def normArg(alts: List[TermRef], arg: untpd.Tree, idx: Int): untpd.Tree = arg match + case Block(Nil, expr) if !expr.isEmpty => normArg(alts, expr, idx) + case untpd.Function(args: List[untpd.ValDef] @unchecked, body) => // If ref refers to a method whose parameter at index `idx` is a function type, // the arity of that function, otherise -1. 
- def paramCount(ref: TermRef) = - val formals = ref.widen.firstParamTypes - if formals.length > idx then - formals(idx) match - case defn.FunctionOf(args, _, _) => args.length - case _ => -1 - else -1 - - val numArgs = args.length - if numArgs != 1 - && !alts.exists(paramCount(_) == numArgs) - && alts.exists(paramCount(_) == 1) - then - desugar.makeTupledFunction(args, body, isGenericTuple = true) + def paramCount(ref: TermRef) = + val formals = ref.widen.firstParamTypes + if formals.length > idx then + formals(idx) match + case defn.FunctionOf(args, _, _) => args.length + case _ => -1 + else -1 + + val numArgs = args.length + if numArgs != 1 + && !alts.exists(paramCount(_) == numArgs) + && alts.exists(paramCount(_) == 1) + then + desugar.makeTupledFunction(args, body, isGenericTuple = true) // `isGenericTuple = true` is the safe choice here. It means the i'th tuple // element is selected with `(i)` instead of `_i`, which gives the same code // in the end, but the compilation time and the ascribed type are more involved. // It also means that -Ytest-pickler -Xprint-types fails for sources exercising // the idiom since after pickling the target is known, so _i is used directly. 
- else arg - case _ => arg - end normArg - - val candidates = pt match { - case pt @ FunProto(args, resultType) => - val numArgs = args.length - def sizeFits(alt: TermRef): Boolean = alt.widen.stripPoly match { - case tp: MethodType => - val ptypes = tp.paramInfos - val numParams = ptypes.length - def isVarArgs = ptypes.nonEmpty && ptypes.last.isRepeatedParam - def numDefaultParams = - if alt.symbol.hasDefaultParams then - val fn = ref(alt, needLoad = false) - ptypes.indices.count(n => !findDefaultGetter(fn, n, testOnly = true).isEmpty) - else 0 - if numParams < numArgs then isVarArgs - else if numParams == numArgs then true - else - val numNecessaryArgs = numParams - numDefaultParams - if numNecessaryArgs <= numArgs then true - else if numNecessaryArgs == numArgs + 1 then isVarArgs - else false - case _ => - numArgs == 0 - } + else arg + case _ => arg + end normArg + + val candidates = pt match + case pt @ FunProto(args, resultType) => + val numArgs = args.length + def sizeFits(alt: TermRef): Boolean = alt.widen.stripPoly match + case tp: MethodType => + val ptypes = tp.paramInfos + val numParams = ptypes.length + def isVarArgs = ptypes.nonEmpty && ptypes.last.isRepeatedParam + def numDefaultParams = + if alt.symbol.hasDefaultParams then + val fn = ref(alt, needLoad = false) + ptypes.indices.count(n => !findDefaultGetter(fn, n, testOnly = true).isEmpty) + else 0 + if numParams < numArgs then isVarArgs + else if numParams == numArgs then true + else + val numNecessaryArgs = numParams - numDefaultParams + if numNecessaryArgs <= numArgs then true + else if numNecessaryArgs == numArgs + 1 then isVarArgs + else false + case _ => + numArgs == 0 - def narrowBySize(alts: List[TermRef]): List[TermRef] = - alts.filterConserve(sizeFits(_)) + def narrowBySize(alts: List[TermRef]): List[TermRef] = + alts.filterConserve(sizeFits(_)) - def narrowByShapes(alts: List[TermRef]): List[TermRef] = - if args.exists(untpd.isFunctionWithUnknownParamType) then - val normArgs = 
args.mapWithIndexConserve(normArg(alts, _, _)) - if hasNamedArg(args) then narrowByTrees(alts, normArgs.map(treeShape), resultType) - else narrowByTypes(alts, normArgs.map(typeShape), resultType) - else - alts + def narrowByShapes(alts: List[TermRef]): List[TermRef] = + if args.exists(untpd.isFunctionWithUnknownParamType) then + val normArgs = args.mapWithIndexConserve(normArg(alts, _, _)) + if hasNamedArg(args) then narrowByTrees(alts, normArgs.map(treeShape), resultType) + else narrowByTypes(alts, normArgs.map(typeShape), resultType) + else + alts - def narrowByTrees(alts: List[TermRef], args: List[Tree], resultType: Type): List[TermRef] = - alts.filterConserve(alt => - isApplicableMethodRef(alt, args, resultType, keepConstraint = false, ArgMatch.CompatibleCAP) - ) + def narrowByTrees(alts: List[TermRef], args: List[Tree], resultType: Type): List[TermRef] = + alts.filterConserve(alt => + isApplicableMethodRef(alt, args, resultType, keepConstraint = false, ArgMatch.CompatibleCAP) + ) - record("resolveOverloaded.FunProto", alts.length) - val alts1 = narrowBySize(alts) - overload.println(i"narrowed by size: ${alts1.map(_.symbol.showDcl)}%, %") - if isDetermined(alts1) then alts1 - else - record("resolveOverloaded.narrowedBySize", alts1.length) - val alts2 = narrowByShapes(alts1) - overload.println(i"narrowed by shape: ${alts2.map(_.symbol.showDcl)}%, %") - if isDetermined(alts2) then alts2 + record("resolveOverloaded.FunProto", alts.length) + val alts1 = narrowBySize(alts) + overload.println(i"narrowed by size: ${alts1.map(_.symbol.showDcl)}%, %") + if isDetermined(alts1) then alts1 else - record("resolveOverloaded.narrowedByShape", alts2.length) - pretypeArgs(alts2, pt) - narrowByTrees(alts2, pt.typedArgs(normArg(alts2, _, _)), resultType) + record("resolveOverloaded.narrowedBySize", alts1.length) + val alts2 = narrowByShapes(alts1) + overload.println(i"narrowed by shape: ${alts2.map(_.symbol.showDcl)}%, %") + if isDetermined(alts2) then alts2 + else + 
record("resolveOverloaded.narrowedByShape", alts2.length) + pretypeArgs(alts2, pt) + narrowByTrees(alts2, pt.typedArgs(normArg(alts2, _, _)), resultType) - case pt @ PolyProto(targs1, pt1) => - val alts1 = alts.filterConserve(pt.canInstantiate) - if isDetermined(alts1) then alts1 - else - def withinBounds(alt: TermRef) = alt.widen match - case tp: PolyType => - TypeOps.boundsViolations(targs1, tp.paramInfos, _.substParams(tp, _), NoType).isEmpty - val alts2 = alts1.filter(withinBounds) - if isDetermined(alts2) then alts2 - else resolveMapped(alts1, _.widen.appliedTo(targs1.tpes), pt1) - - case defn.FunctionOf(args, resultType, _) => - narrowByTypes(alts, args, resultType) - - case pt => - val compat = alts.filterConserve(normalizedCompatible(_, pt, keepConstraint = false)) - if (compat.isEmpty) + case pt @ PolyProto(targs1, pt1) => + val alts1 = alts.filterConserve(pt.canInstantiate) + if isDetermined(alts1) then alts1 + else + def withinBounds(alt: TermRef) = alt.widen match + case tp: PolyType => + TypeOps.boundsViolations(targs1, tp.paramInfos, _.substParams(tp, _), NoType).isEmpty + val alts2 = alts1.filter(withinBounds) + if isDetermined(alts2) then alts2 + else resolveMapped(alts1, _.widen.appliedTo(targs1.tpes), pt1) + + case defn.FunctionOf(args, resultType, _) => + narrowByTypes(alts, args, resultType) + + case pt => + val compat = alts.filterConserve(normalizedCompatible(_, pt, keepConstraint = false)) + if (compat.isEmpty) /* * the case should not be moved to the enclosing match * since SAM type must be considered only if there are no candidates @@ -2076,85 +1952,81 @@ trait Applications extends Compatibility { * def f: java.io.OutputStream = ??? 
* new java.io.ObjectOutputStream(f) */ - pt match { - case SAMType(mtp) => - narrowByTypes(alts, mtp.paramInfos, mtp.resultType) - case _ => + pt match + case SAMType(mtp) => + narrowByTypes(alts, mtp.paramInfos, mtp.resultType) + case _ => // pick any alternatives that are not methods since these might be convertible // to the expected type, or be used as extension method arguments. - val convertible = alts.filterNot(alt => + val convertible = alts.filterNot(alt => normalize(alt, IgnoredProto(pt)).widenSingleton.isInstanceOf[MethodType]) - if convertible.length == 1 then convertible else compat - } - else compat - } + if convertible.length == 1 then convertible else compat + else compat /** The type of alternative `alt` after instantiating its first parameter * clause with `argTypes`. In addition, if the resulting type is a PolyType * and `typeArgs` matches its parameter list, instantiate the result with `typeArgs`. */ - def skipParamClause(argTypes: List[Type], typeArgs: List[Type])(alt: TermRef): Type = - def skip(tp: Type): Type = tp match { - case tp: PolyType => - skip(tp.resultType) match - case NoType => - NoType - case rt: PolyType if typeArgs.length == rt.paramInfos.length => - tp.derivedLambdaType(resType = rt.instantiate(typeArgs)) - case rt => - tp.derivedLambdaType(resType = rt).asInstanceOf[PolyType].flatten - case tp: MethodType => - tp.instantiate(argTypes) - case _ => - NoType - } - skip(alt.widen) + def skipParamClause(argTypes: List[Type], typeArgs: List[Type])(alt: TermRef): Type = + def skip(tp: Type): Type = tp match + case tp: PolyType => + skip(tp.resultType) match + case NoType => + NoType + case rt: PolyType if typeArgs.length == rt.paramInfos.length => + tp.derivedLambdaType(resType = rt.instantiate(typeArgs)) + case rt => + tp.derivedLambdaType(resType = rt).asInstanceOf[PolyType].flatten + case tp: MethodType => + tp.instantiate(argTypes) + case _ => + NoType + skip(alt.widen) - def resultIsMethod(tp: Type): Boolean = 
tp.widen.stripPoly match - case tp: MethodType => stripInferrable(tp.resultType).isInstanceOf[MethodType] - case _ => false + def resultIsMethod(tp: Type): Boolean = tp.widen.stripPoly match + case tp: MethodType => stripInferrable(tp.resultType).isInstanceOf[MethodType] + case _ => false - record("resolveOverloaded.narrowedApplicable", candidates.length) - if pt.unusableForInference then + record("resolveOverloaded.narrowedApplicable", candidates.length) + if pt.unusableForInference then // `pt` might have become erroneous by typing arguments of FunProtos. // If `pt` is erroneous, don't try to go further; report the error in `pt` instead. - candidates - else - val found = narrowMostSpecific(candidates) - if found.length <= 1 then found + candidates else - val deepPt = pt.deepenProto - deepPt match - case pt @ FunProto(_, PolyProto(targs, resType)) => + val found = narrowMostSpecific(candidates) + if found.length <= 1 then found + else + val deepPt = pt.deepenProto + deepPt match + case pt @ FunProto(_, PolyProto(targs, resType)) => // try to narrow further with snd argument list and following type params - resolveMapped(candidates, + resolveMapped(candidates, skipParamClause(pt.typedArgs().tpes, targs.tpes), resType) - case pt @ FunProto(_, resType: FunOrPolyProto) => + case pt @ FunProto(_, resType: FunOrPolyProto) => // try to narrow further with snd argument list - resolveMapped(candidates, + resolveMapped(candidates, skipParamClause(pt.typedArgs().tpes, Nil), resType) - case _ => + case _ => // prefer alternatives that need no eta expansion - val noCurried = alts.filterConserve(!resultIsMethod(_)) - val noCurriedCount = noCurried.length - if noCurriedCount == 1 then - noCurried - else if noCurriedCount > 1 && noCurriedCount < alts.length then - resolveOverloaded1(noCurried, pt) - else + val noCurried = alts.filterConserve(!resultIsMethod(_)) + val noCurriedCount = noCurried.length + if noCurriedCount == 1 then + noCurried + else if noCurriedCount > 1 && 
noCurriedCount < alts.length then + resolveOverloaded1(noCurried, pt) + else // prefer alternatves that match without default parameters - val noDefaults = alts.filterConserve(!_.symbol.hasDefaultParams) - val noDefaultsCount = noDefaults.length - if noDefaultsCount == 1 then - noDefaults - else if noDefaultsCount > 1 && noDefaultsCount < alts.length then - resolveOverloaded1(noDefaults, pt) - else if deepPt ne pt then + val noDefaults = alts.filterConserve(!_.symbol.hasDefaultParams) + val noDefaultsCount = noDefaults.length + if noDefaultsCount == 1 then + noDefaults + else if noDefaultsCount > 1 && noDefaultsCount < alts.length then + resolveOverloaded1(noDefaults, pt) + else if deepPt ne pt then // try again with a deeper known expected type - resolveOverloaded1(alts, deepPt) - else - candidates - } + resolveOverloaded1(alts, deepPt) + else + candidates end resolveOverloaded1 /** The largest suffix of `paramss` that has the same first parameter name as `t`, @@ -2218,21 +2090,20 @@ trait Applications extends Compatibility { * With `pretypeArgs`, we use the `Char => ?` as the expected type of the * closure `x => x.toUpper`, which makes the code typecheck. 
*/ - private def pretypeArgs(alts: List[TermRef], pt: FunProto)(using Context): Unit = { - def recur(altFormals: List[List[Type]], args: List[untpd.Tree]): Unit = args match { + private def pretypeArgs(alts: List[TermRef], pt: FunProto)(using Context): Unit = + def recur(altFormals: List[List[Type]], args: List[untpd.Tree]): Unit = args match case arg :: args1 if !altFormals.exists(_.isEmpty) => def isUniform[T](xs: List[T])(p: (T, T) => Boolean) = xs.forall(p(_, xs.head)) val formalsForArg: List[Type] = altFormals.map(_.head) def argTypesOfFormal(formal: Type): List[Type] = - formal.dealias match { + formal.dealias match case defn.FunctionOf(args, result, isImplicit) => args case defn.PartialFunctionOf(arg, result) => arg :: Nil case _ => Nil - } val formalParamTypessForArg: List[List[Type]] = formalsForArg.map(argTypesOfFormal) if (formalParamTypessForArg.forall(_.nonEmpty) && - isUniform(formalParamTypessForArg)((x, y) => x.length == y.length)) { + isUniform(formalParamTypessForArg)((x, y) => x.length == y.length)) val commonParamTypes = formalParamTypessForArg.transpose.map(ps => // Given definitions above, for i = 1,...,m, // ps(i) = List(p_i_1, ..., p_i_n) -- i.e. a column @@ -2251,61 +2122,50 @@ trait Applications extends Compatibility { else defn.FunctionOf(commonParamTypes, WildcardType, isContextual = untpd.isContextualClosure(arg)) overload.println(i"pretype arg $arg with expected type $commonFormal") if (commonParamTypes.forall(isFullyDefined(_, ForceDegree.flipBottom))) - withMode(Mode.ImplicitsEnabled) { + withMode(Mode.ImplicitsEnabled): // We can cache the adapted argument here because the expected type // is a common type shared by all overloading candidates. 
pt.cacheArg(arg, pt.typedArg(arg, commonFormal)) - } - } recur(altFormals.map(_.tail), args1) case _ => - } recur(alts.map(_.widen.firstParamTypes), pt.args) - } - private def harmonizeWith[T <: AnyRef](ts: List[T])(tpe: T => Type, adapt: (T, Type) => T)(using Context): List[T] = { - def targetClass(ts: List[T], cls: Symbol, intLitSeen: Boolean): Symbol = ts match { + private def harmonizeWith[T <: AnyRef](ts: List[T])(tpe: T => Type, adapt: (T, Type) => T)(using Context): List[T] = + def targetClass(ts: List[T], cls: Symbol, intLitSeen: Boolean): Symbol = ts match case t :: ts1 => - tpe(t).widenTermRefExpr match { + tpe(t).widenTermRefExpr match case ConstantType(c: Constant) if c.tag == IntTag => targetClass(ts1, cls, true) case t => val sym = t.classSymbol if (!sym.isNumericValueClass || cls.exists && cls != sym) NoSymbol else targetClass(ts1, sym, intLitSeen) - } case Nil => if (cls != defn.IntClass && intLitSeen) cls else NoSymbol - } val cls = targetClass(ts, NoSymbol, false) - if (cls.exists) { + if (cls.exists) def lossOfPrecision(n: Int): Boolean = cls == defn.FloatClass && n.toFloat.toInt != n var canAdapt = true val ts1 = ts.mapConserve { t => - tpe(t).widenTermRefExpr match { + tpe(t).widenTermRefExpr match case ConstantType(c: Constant) if c.tag == IntTag => canAdapt &= c.convertTo(cls.typeRef) != null && !lossOfPrecision(c.intValue) if (canAdapt) adapt(t, cls.typeRef) else t case _ => t - } } if (canAdapt) ts1 else ts - } else ts - } /** If `trees` all have numeric value types, and they do not have all the same type, * pick a common numeric supertype and try to convert all constant Int literals to this type. * If the resulting trees all have the same type, return them instead of the original ones. 
*/ - def harmonize(trees: List[Tree])(using Context): List[Tree] = { - def adaptDeep(tree: Tree, pt: Type): Tree = tree match { + def harmonize(trees: List[Tree])(using Context): List[Tree] = + def adaptDeep(tree: Tree, pt: Type): Tree = tree match case cdef: CaseDef => tpd.cpy.CaseDef(cdef)(body = adaptDeep(cdef.body, pt)) case _ => adapt(tree, pt) - } if (ctx.isAfterTyper) trees else harmonizeWith(trees)(_.tpe, adaptDeep) - } /** Apply a transformation `harmonize` on the results of operation `op`, * unless the expected type `pt` is fully defined. @@ -2324,13 +2184,12 @@ trait Applications extends Compatibility { * which could lead to unsoundness. */ def harmonic[T](harmonize: List[T] => List[T], pt: Type)(op: => List[T])(using Context): List[T] = - if (!isFullyDefined(pt, ForceDegree.none)) { + if (!isFullyDefined(pt, ForceDegree.none)) val origConstraint = ctx.typerState.constraint val origElems = op val harmonizedElems = harmonize(origElems) if (harmonizedElems ne origElems) ctx.typerState.constraint = origConstraint harmonizedElems - } else op /** If all `types` are numeric value types, and they are not all the same type, @@ -2347,7 +2206,7 @@ trait Applications extends Compatibility { * * where comes from `pt` if it is a (possibly ignored) PolyProto. */ - def extMethodApply(methodRef: untpd.Tree, receiver: Tree, pt: Type)(using Context): Tree = { + def extMethodApply(methodRef: untpd.Tree, receiver: Tree, pt: Type)(using Context): Tree = /** Integrate the type arguments (if any) from `currentPt` into `tree`, and produce * an expected type that hides the appropriate amount of information through IgnoreProto. 
*/ @@ -2362,12 +2221,10 @@ trait Applications extends Compatibility { (tree, currentPt) val (core, pt1) = normalizePt(methodRef, pt) - withMode(Mode.SynthesizeExtMethodReceiver) { + withMode(Mode.SynthesizeExtMethodReceiver): typed( untpd.Apply(core, untpd.TypedSplice(receiver, isExtensionReceiver = true) :: Nil), pt1, ctx.typerState.ownedVars) - } - } /** Assuming methodRef is a reference to an extension method defined e.g. as * @@ -2404,14 +2261,13 @@ trait Applications extends Compatibility { val newCtx = ctx.fresh.setNewScope.setReporter(new reporting.ThrowingReporter(ctx.reporter)) try - val appliedTree = inContext(newCtx) { + val appliedTree = inContext(newCtx): // Introducing an auxiliary symbol in a temporary scope. // Entering the symbol indirectly by `newCtx.enter` // could instead add the symbol to the enclosing class // which could break the REPL. newCtx.scope.openForMutations.enter(truncatedSym) newCtx.typer.extMethodApply(truncatedRefTree, receiver, WildcardType) - } if appliedTree.tpe.exists && !appliedTree.tpe.isError then Some(replaceCallee(appliedTree, methodRefTree)) else @@ -2427,4 +2283,3 @@ trait Applications extends Compatibility { val captured = captureWildcards(tp) if (captured ne tp) && isCompatible(captured, pt) then captured else tp -} diff --git a/compiler/src/dotty/tools/dotc/typer/Checking.scala b/compiler/src/dotty/tools/dotc/typer/Checking.scala index 2869f64f33d0..8a2ed40394a7 100644 --- a/compiler/src/dotty/tools/dotc/typer/Checking.scala +++ b/compiler/src/dotty/tools/dotc/typer/Checking.scala @@ -43,7 +43,7 @@ import transform.TypeUtils.* import collection.mutable import reporting._ -object Checking { +object Checking: import tpd._ /** Add further information for error messages involving applied types if the @@ -109,7 +109,7 @@ object Checking { * @param tpt If `tree` is synthesized from a type in a TypeTree, * the original TypeTree, or EmptyTree otherwise. 
*/ - def checkAppliedType(tree: AppliedTypeTree, tpt: Tree = EmptyTree)(using Context): Unit = { + def checkAppliedType(tree: AppliedTypeTree, tpt: Tree = EmptyTree)(using Context): Unit = val AppliedTypeTree(tycon, args) = tree // If `args` is a list of named arguments, return corresponding type parameters, // otherwise return type parameters unchanged @@ -127,18 +127,16 @@ object Checking { then checkBounds(args, bounds, instantiate, tree.tpe, tpt) - def checkWildcardApply(tp: Type): Unit = tp match { + def checkWildcardApply(tp: Type): Unit = tp match case tp @ AppliedType(tycon, _) => if tp.isUnreducibleWild then report.errorOrMigrationWarning( showInferred(UnreducibleApplication(tycon), tp, tpt), tree.srcPos, from = `3.0`) case _ => - } def checkValidIfApply(using Context): Unit = checkWildcardApply(tycon.tpe.appliedTo(args.map(_.tpe))) withMode(Mode.AllowLambdaWildcardApply)(checkValidIfApply) - } /** Check all applied type trees in inferred type `tpt` for well-formedness */ def checkAppliedTypesIn(tpt: TypeTree)(using Context): Unit = @@ -154,10 +152,9 @@ object Checking { traverseChildren(tp) checker.traverse(tpt.tpe) - def checkNoWildcard(tree: Tree)(using Context): Tree = tree.tpe match { + def checkNoWildcard(tree: Tree)(using Context): Tree = tree.tpe match case tpe: TypeBounds => errorTree(tree, em"no wildcard type allowed here") case _ => tree - } /** Check that kind of `arg` has the same outline as the kind of paramBounds. * E.g. 
if `paramBounds` has kind * -> *, `arg` must have that kind as well, @@ -175,12 +172,11 @@ object Checking { arg.tpe.hasSameKindAs(paramBounds.bounds.hi)) arg else errorTree(arg, em"Type argument ${arg.tpe} does not have the same kind as its bound $paramBounds") - def preCheckKinds(args: List[Tree], paramBoundss: List[Type])(using Context): List[Tree] = { + def preCheckKinds(args: List[Tree], paramBoundss: List[Type])(using Context): List[Tree] = val args1 = args.zipWithConserve(paramBoundss)(preCheckKind) args1 ++ args.drop(paramBoundss.length) // add any arguments that do not correspond to a parameter back, // so the wrong number of parameters is reported afterwards. - } /** Check that `tp` refers to a nonAbstract class * and that the instance conforms to the self type of the created class. @@ -189,10 +185,9 @@ object Checking { tp.underlyingClassRef(refinementOK = false) match case tref: TypeRef => val cls = tref.symbol - if (cls.isOneOf(AbstractOrTrait)) { + if (cls.isOneOf(AbstractOrTrait)) val srcCls = srcTp.underlyingClassRef(refinementOK = false).typeSymbol report.error(CantInstantiateAbstractClassOrTrait(srcCls, isTrait = srcCls.is(Trait)), pos) - } if !cls.is(Module) then // Create a synthetic singleton type instance, and check whether // it conforms to the self type of the class as seen from that instance. @@ -203,12 +198,11 @@ object Checking { case _ => /** Check that type `tp` is realizable. */ - def checkRealizable(tp: Type, pos: SrcPos, what: String = "path")(using Context): Unit = { + def checkRealizable(tp: Type, pos: SrcPos, what: String = "path")(using Context): Unit = val rstatus = realizability(tp) if (rstatus ne Realizable) report.errorOrMigrationWarning( em"$tp is not a legal $what\nsince it${rstatus.msg}", pos, from = `3.0`) - } /** Given a parent `parent` of a class `cls`, if `parent` is a trait check that * the superclass of `cls` derived from the superclass of `parent`. 
@@ -222,7 +216,7 @@ object Checking { * The standard library relies on this idiom. */ def checkTraitInheritance(parent: Symbol, cls: ClassSymbol, pos: SrcPos)(using Context): Unit = - parent match { + parent match case parent: ClassSymbol if parent.is(Trait) => val psuper = parent.superClass val csuper = cls.superClass @@ -232,12 +226,11 @@ object Checking { if (!ok) report.error(em"illegal trait inheritance: super$csuper does not derive from $parent's super$psuper", pos) case _ => - } /** A type map which checks that the only cycles in a type are F-bounds * and that protects all F-bounded references by LazyRefs. */ - class CheckNonCyclicMap(sym: Symbol, reportErrors: Boolean)(using Context) extends TypeMap { + class CheckNonCyclicMap(sym: Symbol, reportErrors: Boolean)(using Context) extends TypeMap: /** Set of type references whose info is currently checked */ private val locked = mutable.Set[TypeRef]() @@ -259,22 +252,20 @@ object Checking { private def checkPart(tp: Type, w: String) = try apply(tp) - finally { + finally where = w lastChecked = tp - } - private def checkUpper(tp: Type, w: String) = { + private def checkUpper(tp: Type, w: String) = val saved = nestedCycleOK nestedCycleOK = true try checkPart(tp, w) finally nestedCycleOK = saved - } /** Check info `tp` for cycles. Throw CyclicReference for illegal cycles, * break direct cycle with a LazyRef for legal, F-bounded cycles. 
*/ - def checkInfo(tp: Type): Type = tp match { + def checkInfo(tp: Type): Type = tp match case tp @ TypeAlias(alias) => tp.derivedAlias(checkPart(alias, "alias")) case tp @ MatchAlias(alias) => @@ -283,21 +274,18 @@ object Checking { tp.derivedTypeBounds(checkPart(lo, "lower bound"), checkUpper(hi, "upper bound")) case _ => tp - } - private def apply(tp: Type, cycleOK: Boolean, nestedCycleOK: Boolean): Type = { + private def apply(tp: Type, cycleOK: Boolean, nestedCycleOK: Boolean): Type = val savedCycleOK = this.cycleOK val savedNestedCycleOK = this.nestedCycleOK this.cycleOK = cycleOK this.nestedCycleOK = nestedCycleOK try apply(tp) - finally { + finally this.cycleOK = savedCycleOK this.nestedCycleOK = savedNestedCycleOK - } - } - def apply(tp: Type): Type = tp match { + def apply(tp: Type): Type = tp match case tp: TermRef => this(tp.info) mapOver(tp) @@ -308,12 +296,12 @@ object Checking { case tp: RecType => tp.rebind(this(tp.parent)) case tp @ TypeRef(pre, _) => - try { + try // A prefix is interesting if it might contain (transitively) a reference // to symbol `sym` itself. We only check references with interesting // prefixes for cycles. This pruning is done in order not to force // global symbols when doing the cyclicity check. 
- def isInteresting(prefix: Type): Boolean = prefix.stripTypeVar match { + def isInteresting(prefix: Type): Boolean = prefix.stripTypeVar match case NoPrefix => true case prefix: ThisType => sym.owner.isClass && ( @@ -329,9 +317,8 @@ object Checking { case _: RefinedOrRecType | _: AppliedType => true case tp: AnnotatedType => isInteresting(tp.parent) case _ => false - } - if (isInteresting(pre)) { + if (isInteresting(pre)) val pre1 = this(pre, false, false) if (locked.contains(tp) || tp.symbol.infoOrCompleter.isInstanceOf[NoCompleter]) throw CyclicReference(tp.symbol) @@ -339,19 +326,14 @@ object Checking { try if (!tp.symbol.isClass) checkInfo(tp.info) finally locked -= tp tp.withPrefix(pre1) - } else tp - } - catch { + catch case ex: CyclicReference => report.debuglog(i"cycle detected for $tp, $nestedCycleOK, $cycleOK") if (cycleOK) LazyRef.of(tp) else if (reportErrors) throw ex else tp - } case _ => mapOver(tp) - } - } /** Under -Yrequire-targetName, if `sym` has an operator name, check that it has a * @targetName annotation. @@ -374,16 +356,14 @@ object Checking { * @return `info` where every legal F-bounded reference is proctected * by a `LazyRef`, or `ErrorType` if a cycle was detected and reported. */ - def checkNonCyclic(sym: Symbol, info: Type, reportErrors: Boolean)(using Context): Type = { + def checkNonCyclic(sym: Symbol, info: Type, reportErrors: Boolean)(using Context): Type = val checker = withMode(Mode.CheckCyclic)(new CheckNonCyclicMap(sym, reportErrors)) try checker.checkInfo(info) - catch { + catch case ex: CyclicReference => if (reportErrors) errorType(IllegalCyclicTypeReference(sym, checker.where, checker.lastChecked), sym.srcPos) else info - } - } /** Check that refinement satisfies the following two conditions * 1. No part of it refers to a symbol that's defined in the same refinement @@ -395,15 +375,15 @@ object Checking { * deprecated warnings, not errors. 
*/ def checkRefinementNonCyclic(refinement: Tree, refineCls: ClassSymbol, seen: mutable.Set[Symbol]) - (using Context): Unit = { + (using Context): Unit = def flag(what: String, tree: Tree) = report.warning(i"$what reference in refinement is deprecated", tree.srcPos) def forwardRef(tree: Tree) = flag("forward", tree) def selfRef(tree: Tree) = flag("self", tree) - val checkTree = new TreeAccumulator[Unit] { + val checkTree = new TreeAccumulator[Unit]: def checkRef(tree: Tree, sym: Symbol) = if (sym.maybeOwner == refineCls && !seen(sym)) forwardRef(tree) - def apply(x: Unit, tree: Tree)(using Context) = tree match { + def apply(x: Unit, tree: Tree)(using Context) = tree match case tree: MemberDef => foldOver(x, tree) seen += tree.symbol @@ -415,39 +395,33 @@ object Checking { case tree: This => selfRef(tree) case tree: TypeTree => - val checkType = new TypeAccumulator[Unit] { - def apply(x: Unit, tp: Type): Unit = tp match { + val checkType = new TypeAccumulator[Unit]: + def apply(x: Unit, tp: Type): Unit = tp match case tp: NamedType => checkRef(tree, tp.symbol) - tp.prefix match { + tp.prefix match case pre: ThisType => case pre => foldOver(x, pre) - } case tp: ThisType if tp.cls == refineCls => selfRef(tree) case _ => foldOver(x, tp) - } - } checkType((), tree.tpe) case _ => foldOver(x, tree) - } - } checkTree((), refinement) - } /** Check type members inherited from different `parents` of `joint` type for cycles, * unless a type with the same name already appears in `decls`. 
* @return true iff no cycles were detected */ - def checkNonCyclicInherited(joint: Type, parents: List[Type], decls: Scope, pos: SrcPos)(using Context): Unit = { + def checkNonCyclicInherited(joint: Type, parents: List[Type], decls: Scope, pos: SrcPos)(using Context): Unit = // If we don't have more than one parent, then there's nothing to check if (parents.lengthCompare(1) <= 0) return def qualifies(sym: Symbol) = sym.name.isTypeName && !sym.is(Private) - withMode(Mode.CheckCyclic) { + withMode(Mode.CheckCyclic): val abstractTypeNames = for (parent <- parents; mbr <- parent.abstractTypeMembers if qualifies(mbr.symbol)) yield mbr.name.asTypeName @@ -463,11 +437,9 @@ object Checking { catch case _: RecursionOverflow | _: CyclicReference => report.error(em"cyclic reference involving type $name", pos) false - } - } /** Check that symbol's definition is well-formed. */ - def checkWellFormed(sym: Symbol)(using Context): Unit = { + def checkWellFormed(sym: Symbol)(using Context): Unit = def fail(msg: Message) = report.error(msg, sym.srcPos) def warn(msg: Message) = report.warning(msg, sym.srcPos) @@ -491,11 +463,10 @@ object Checking { fail(ParamsNoInline(sym.owner)) if sym.isInlineMethod && !sym.is(Deferred) && sym.allOverriddenSymbols.nonEmpty then checkInlineOverrideParameters(sym) - if (sym.is(Implicit)) { + if (sym.is(Implicit)) assert(!sym.owner.is(Package), s"top-level implicit $sym should be wrapped by a package after typer") if sym.isType && (!sym.isClass || sym.is(Trait)) then fail(TypesAndTraitsCantBeImplicit()) - } if sym.is(Transparent) then if sym.isType then if !sym.isExtensibleClass then fail(em"`transparent` can only be used for extensible classes and traits") @@ -531,18 +502,16 @@ object Checking { fail(em"Inline methods cannot be @tailrec") if sym.hasAnnotation(defn.TargetNameAnnot) && sym.isClass && sym.isTopLevelClass then fail(TargetNameOnTopLevelClass(sym)) - if (sym.hasAnnotation(defn.NativeAnnot)) { + if (sym.hasAnnotation(defn.NativeAnnot)) if 
(!sym.is(Deferred)) fail(NativeMembersMayNotHaveImplementation(sym)) else if(sym.owner.is(Trait)) fail(TraitMayNotDefineNativeMethod(sym)) - } - else if (sym.is(Deferred, butNot = Param) && !sym.isType && !sym.isSelfSym) { + else if (sym.is(Deferred, butNot = Param) && !sym.isType && !sym.isSelfSym) if (!sym.owner.isClass || sym.owner.is(Module) || sym.owner.isAnonymousClass) fail(OnlyClassesCanHaveDeclaredButUndefinedMembers(sym)) checkWithDeferred(Private) checkWithDeferred(Final) - } if (sym.isValueClass && sym.is(Trait) && !sym.isRefinementClass) fail(CannotExtendAnyVal(sym)) if (sym.isConstructor && !sym.isPrimaryConstructor && sym.owner.is(Trait, butNot = JavaDefined)) @@ -551,10 +520,9 @@ object Checking { checkApplicable(Inline, sym.isTerm && !sym.isOneOf(Mutable | Module)) checkApplicable(Lazy, !sym.isOneOf(Method | Mutable)) if (sym.isType && !sym.isOneOf(Deferred | JavaDefined)) - for (cls <- sym.allOverriddenSymbols.filter(_.isClass)) { + for (cls <- sym.allOverriddenSymbols.filter(_.isClass)) fail(CannotHaveSameNameAs(sym, cls, CannotHaveSameNameAs.CannotBeOverridden)) sym.setFlag(Private) // break the overriding relationship by making sym Private - } checkApplicable(Erased, !sym.isOneOf(MutableOrLazy, butNot = Given) && !sym.isType || sym.isClass) checkCombination(Final, Open) @@ -569,7 +537,6 @@ object Checking { // as the code is inlined and the reference is removed before the erased usage check. checkCombination(Erased, Inline) checkNoConflict(Lazy, ParamAccessor, em"parameter may not be `lazy`") - } /** Check for illegal or redundant modifiers on modules. This is done separately * from checkWellformed, since the original module modifiers don't surivive desugaring @@ -605,8 +572,8 @@ object Checking { * * @return The `info` of `sym`, with problematic aliases expanded away. 
*/ - def checkNoPrivateLeaks(sym: Symbol)(using Context): Type = { - class NotPrivate extends TypeMap { + def checkNoPrivateLeaks(sym: Symbol)(using Context): Type = + class NotPrivate extends TypeMap: var errors: List[Message] = Nil private var inCaptureSet: Boolean = false @@ -633,34 +600,31 @@ object Checking { // class parameters in capture sets are not treated as leaked since in // phase CheckCaptures these are treated as normal vals. - def apply(tp: Type): Type = tp match { + def apply(tp: Type): Type = tp match case tp: NamedType => val prevErrors = errors var tp1 = - if (isLeaked(tp.symbol)) { + if (isLeaked(tp.symbol)) errors = em"non-private ${sym.showLocated} refers to private ${tp.symbol}\nin its type signature ${sym.info}" :: errors tp - } else mapOver(tp) - if ((errors ne prevErrors) && tp.info.isTypeAlias) { + if ((errors ne prevErrors) && tp.info.isTypeAlias) // try to dealias to avoid a leak error val savedErrors = errors errors = prevErrors val tp2 = apply(tp.superType) if (errors eq prevErrors) tp1 = tp2 else errors = savedErrors - } tp1 case tp: ClassInfo => - def transformedParent(tp: Type): Type = tp match { + def transformedParent(tp: Type): Type = tp match case ref: TypeRef => ref case ref: AppliedType => ref case AnnotatedType(parent, annot) => AnnotatedType(transformedParent(parent), annot) case _ => defn.ObjectType // can happen if class files are missing - } tp.derivedClassInfo( prefix = apply(tp.prefix), declaredParents = @@ -676,17 +640,14 @@ object Checking { derivedAnnotatedType(tp, underlying1, annot1) case _ => mapOver(tp) - } - } val notPrivate = new NotPrivate val info = notPrivate(sym.info) notPrivate.errors.foreach(report.errorOrMigrationWarning(_, sym.srcPos, from = `3.0`)) info - } /** Verify classes extending AnyVal meet the requirements */ - def checkDerivedValueClass(clazz: Symbol, stats: List[Tree])(using Context): Unit = { - def checkValueClassMember(stat: Tree) = stat match { + def checkDerivedValueClass(clazz: Symbol, 
stats: List[Tree])(using Context): Unit = + def checkValueClassMember(stat: Tree) = stat match case _: TypeDef if stat.symbol.isClass => report.error(ValueClassesMayNotDefineInner(clazz, stat.symbol), stat.srcPos) case _: ValDef if !stat.symbol.is(ParamAccessor) => @@ -697,8 +658,7 @@ object Checking { // ok case _ => report.error(ValueClassesMayNotContainInitalization(clazz), stat.srcPos) - } - if (isDerivedValueClass(clazz)) { + if (isDerivedValueClass(clazz)) if (clazz.is(Trait)) report.error(CannotExtendAnyVal(clazz), clazz.srcPos) if (clazz.is(Abstract)) @@ -707,11 +667,11 @@ object Checking { report.error(ValueClassesMayNotBeContainted(clazz), clazz.srcPos) if (isDerivedValueClass(underlyingOfValueClass(clazz.asClass).classSymbol)) report.error(ValueClassesMayNotWrapAnotherValueClass(clazz), clazz.srcPos) - else { + else val clParamAccessors = clazz.asClass.paramAccessors.filter { param => param.isTerm && !param.is(Flags.Accessor) } - clParamAccessors match { + clParamAccessors match case param :: params => if (param.is(Mutable)) report.error(ValueClassParameterMayNotBeAVar(clazz, param), param.srcPos) @@ -724,11 +684,7 @@ object Checking { report.error("value class can only have one non `erased` parameter", p.srcPos) case Nil => report.error(ValueClassNeedsOneValParam(clazz), clazz.srcPos) - } - } stats.foreach(checkValueClassMember) - } - } /** Check the inline override methods only use inline parameters if they override an inline parameter. 
*/ def checkInlineOverrideParameters(sym: Symbol)(using Context): Unit = @@ -803,9 +759,8 @@ object Checking { else Feature.checkExperimentalFeature("features", imp.srcPos) case _ => end checkExperimentalImports -} -trait Checking { +trait Checking: import tpd._ @@ -820,24 +775,23 @@ trait Checking { if !tp.isStable && !tp.isErroneous then report.error(NotAPath(tp, kind), pos) /** Check that all type members of `tp` have realizable bounds */ - def checkRealizableBounds(cls: Symbol, pos: SrcPos)(using Context): Unit = { + def checkRealizableBounds(cls: Symbol, pos: SrcPos)(using Context): Unit = val rstatus = boundsRealizability(cls.thisType) if (rstatus ne Realizable) report.error(em"$cls cannot be instantiated since it${rstatus.msg}", pos) - } /** Check that pattern `pat` is irrefutable for scrutinee type `sel.tpe`. * This means `sel` is either marked @unchecked or `sel.tpe` conforms to the * pattern's type. If pattern is an UnApply, also check that the extractor is * irrefutable, and do the check recursively. 
*/ - def checkIrrefutable(sel: Tree, pat: Tree, isPatDef: Boolean)(using Context): Boolean = { + def checkIrrefutable(sel: Tree, pat: Tree, isPatDef: Boolean)(using Context): Boolean = val pt = sel.tpe enum Reason: case NonConforming, RefutableExtractor - def fail(pat: Tree, pt: Type, reason: Reason): Boolean = { + def fail(pat: Tree, pt: Type, reason: Reason): Boolean = import Reason._ val message = reason match case NonConforming => @@ -883,7 +837,6 @@ trait Checking { |which $addendum.$rewriteMsg"""), pos, warnFrom = `3.2`, errorFrom = `future`) false - } def check(pat: Tree, pt: Type): Boolean = (pt <:< pat.tpe) || fail(pat, pt, Reason.NonConforming) @@ -897,10 +850,9 @@ trait Checking { recur(pat1, pt) case UnApply(fn, implicits, pats) => check(pat, pt) && - (isIrrefutable(fn, pats.length) || isIrrefutableQuotedPattern(fn, implicits, pt) || fail(pat, pt, Reason.RefutableExtractor)) && { + (isIrrefutable(fn, pats.length) || isIrrefutableQuotedPattern(fn, implicits, pt) || fail(pat, pt, Reason.RefutableExtractor)) `&&`: val argPts = unapplyArgs(fn.tpe.widen.finalResultType, fn, pats, pat.srcPos) pats.corresponds(argPts)(recur) - } case Alternative(pats) => pats.forall(recur(_, pt)) case Typed(arg, tpt) => @@ -909,17 +861,15 @@ trait Checking { true case _ => check(pat, pt) - } + } recur(pat, pt) - } - private def checkLegalImportOrExportPath(path: Tree, kind: String)(using Context): Unit = { + private def checkLegalImportOrExportPath(path: Tree, kind: String)(using Context): Unit = checkStable(path.tpe, path.srcPos, kind) if (!ctx.isAfterTyper) Checking.checkRealizable(path.tpe, path.srcPos) if !isIdempotentExpr(path) && !path.tpe.isErroneous then report.error(em"import prefix is not a pure expression", path.srcPos) - } /** Check that `path` is a legal prefix for an import clause */ def checkLegalImportPath(path: Tree)(using Context): Unit = @@ -971,7 +921,7 @@ trait Checking { * @return `tp` itself if it is a class or trait ref, ObjectType if not. 
*/ def checkClassType(tp: Type, pos: SrcPos, traitReq: Boolean, stablePrefixReq: Boolean)(using Context): Type = - tp.underlyingClassRef(refinementOK = false) match { + tp.underlyingClassRef(refinementOK = false) match case tref: TypeRef => if (traitReq && !tref.symbol.is(Trait)) report.error(TraitIsExpected(tref.symbol), pos) if (stablePrefixReq && ctx.phase <= refchecksPhase) checkStable(tref.prefix, pos, "class prefix") @@ -979,7 +929,6 @@ trait Checking { case _ => report.error(NotClassType(tp), pos) defn.ObjectType - } /** If `sym` is an old-style implicit conversion, check that implicit conversions are enabled. * @pre sym.is(GivenOrImplicit) @@ -1009,18 +958,17 @@ trait Checking { checkFeature(nme.implicitConversions, i"Use of implicit conversion ${conv.showLocated}", NoSymbol, tree.srcPos) - private def infixOKSinceFollowedBy(tree: untpd.Tree): Boolean = tree match { + private def infixOKSinceFollowedBy(tree: untpd.Tree): Boolean = tree match case _: untpd.Block | _: untpd.Match => true case _ => false - } /** Check that `tree` is a valid infix operation. That is, if the * operator is alphanumeric, it must be declared `infix`. */ - def checkValidInfix(tree: untpd.InfixOp, meth: Symbol)(using Context): Unit = { - tree.op match { + def checkValidInfix(tree: untpd.InfixOp, meth: Symbol)(using Context): Unit = + tree.op match case id @ Ident(name: Name) => - name.toTermName match { + name.toTermName match case name: SimpleName if !untpd.isBackquoted(id) && !name.isOperatorName && @@ -1040,14 +988,10 @@ trait Checking { em"""Alphanumeric $kind $name is not declared ${hlAsKeyword("infix")}; it should not be used as infix operator. 
|Instead, use ${alternative(name)} or backticked identifier `$name`.$rewriteMsg""", tree.op.srcPos) - if (ctx.settings.deprecation.value) { + if (ctx.settings.deprecation.value) patch(Span(tree.op.span.start, tree.op.span.start), "`") patch(Span(tree.op.span.end, tree.op.span.end), "`") - } case _ => - } - } - } /** Issue a feature warning if feature is not enabled */ def checkFeature(name: TermName, @@ -1061,15 +1005,14 @@ trait Checking { * are feasible, i.e. that their lower bound conforms to their upper bound. If a type * argument is infeasible, issue and error and continue with upper bound. */ - def checkFeasibleParent(tp: Type, pos: SrcPos, where: => String = "")(using Context): Type = { - def checkGoodBounds(tp: Type) = tp match { + def checkFeasibleParent(tp: Type, pos: SrcPos, where: => String = "")(using Context): Type = + def checkGoodBounds(tp: Type) = tp match case tp @ TypeBounds(lo, hi) if !(lo <:< hi) => report.error(em"no type exists between low bound $lo and high bound $hi$where", pos) TypeBounds(hi, hi) case _ => tp - } - tp match { + tp match case tp @ AndType(tp1, tp2) => report.error(em"conflicting type arguments$where", pos) tp1 @@ -1079,26 +1022,23 @@ trait Checking { tp.derivedRefinedType(tp.parent, tp.refinedName, checkGoodBounds(tp.refinedInfo)) case _ => tp - } - } /** A hook to exclude selected symbols from double declaration check */ def excludeFromDoubleDeclCheck(sym: Symbol)(using Context): Boolean = false /** Check that class does not declare same symbol twice */ - def checkNoDoubleDeclaration(cls: Symbol)(using Context): Unit = { - val seen = new mutable.HashMap[Name, List[Symbol]] { + def checkNoDoubleDeclaration(cls: Symbol)(using Context): Unit = + val seen = new mutable.HashMap[Name, List[Symbol]]: override def default(key: Name) = Nil - } typr.println(i"check no double declarations $cls") - def checkDecl(decl: Symbol): Unit = { - for (other <- seen(decl.name) if !decl.isAbsent() && !other.isAbsent()) { + def checkDecl(decl: 
Symbol): Unit = + for (other <- seen(decl.name) if !decl.isAbsent() && !other.isAbsent()) typr.println(i"conflict? $decl $other") def javaFieldMethodPair = decl.is(JavaDefined) && other.is(JavaDefined) && decl.is(Method) != other.is(Method) - if (decl.matches(other) && !javaFieldMethodPair) { + if (decl.matches(other) && !javaFieldMethodPair) def doubleDefError(decl: Symbol, other: Symbol): Unit = if (!decl.info.isErroneous && !other.info.isErroneous) report.error(DoubleDefinition(decl, other, cls), decl.srcPos) @@ -1106,24 +1046,19 @@ trait Checking { () // do nothing; we already have reported an error that overloaded variants cannot have default arguments else if (decl is Synthetic) doubleDefError(other, decl) else doubleDefError(decl, other) - } if decl.hasDefaultParams && other.hasDefaultParams then report.error(em"two or more overloaded variants of $decl have default arguments", decl.srcPos) decl.resetFlag(HasDefaultParams) - } if (!excludeFromDoubleDeclCheck(decl)) seen(decl.name) = decl :: seen(decl.name) - } cls.info.decls.foreach(checkDecl) - cls.info match { + cls.info match case ClassInfo(_, _, _, _, selfSym: Symbol) => checkDecl(selfSym) case _ => - } - } def checkParentCall(call: Tree, caller: ClassSymbol)(using Context): Unit = - if (!ctx.isAfterTyper) { + if (!ctx.isAfterTyper) val called = call.tpe.classSymbol if (called.is(JavaAnnotation)) report.error(em"${called.name} must appear without any argument to be a valid class parent because it is a Java annotation", call.srcPos) @@ -1135,17 +1070,14 @@ trait Checking { // Check that constructor call is of the form _.(args1)...(argsN). // This guards against calls resulting from inserted implicits or applies. 
- def checkLegalConstructorCall(tree: Tree, encl: Tree, kind: String): Unit = tree match { + def checkLegalConstructorCall(tree: Tree, encl: Tree, kind: String): Unit = tree match case Apply(fn, _) => checkLegalConstructorCall(fn, tree, "") case TypeApply(fn, _) => checkLegalConstructorCall(fn, tree, "type ") case Select(_, nme.CONSTRUCTOR) => // ok case _ => report.error(em"too many ${kind}arguments in parent constructor", encl.srcPos) - } - call match { + call match case Apply(fn, _) => checkLegalConstructorCall(fn, call, "") case _ => - } - } /** Check that `tpt` does not define a higher-kinded type */ def checkSimpleKinded(tpt: Tree)(using Context): Tree = @@ -1171,7 +1103,7 @@ trait Checking { * The standard library relies on this idiom. */ def checkTraitInheritance(parent: Symbol, cls: ClassSymbol, pos: SrcPos)(using Context): Unit = - parent match { + parent match case parent: ClassSymbol if parent.is(Trait) => val psuper = parent.superClass val csuper = cls.superClass @@ -1181,33 +1113,29 @@ trait Checking { if (!ok) report.error(em"illegal trait inheritance: super$csuper does not derive from $parent's super$psuper", pos) case _ => - } /** Check that case classes are not inherited by case classes. */ def checkCaseInheritance(parent: Symbol, caseCls: ClassSymbol, pos: SrcPos)(using Context): Unit = - parent match { + parent match case parent: ClassSymbol => if (parent.is(Case)) report.error(em"""case $caseCls has case ancestor $parent, but case-to-case inheritance is prohibited. |To overcome this limitation, use extractors to pattern match on non-leaf nodes.""", pos) else checkCaseInheritance(parent.superClass, caseCls, pos) case _ => - } /** Check that method parameter types do not reference their own parameter * or later parameters in the same parameter section. 
*/ - def checkNoForwardDependencies(vparams: List[ValDef])(using Context): Unit = vparams match { + def checkNoForwardDependencies(vparams: List[ValDef])(using Context): Unit = vparams match case vparam :: vparams1 => - vparam.tpt.foreachSubTree { + vparam.tpt.foreachSubTree: case id: Ident if vparams.exists(_.symbol == id.symbol) => report.error(em"illegal forward reference to method parameter", id.srcPos) case _ => - } checkNoForwardDependencies(vparams1) case Nil => - } /** Check that all named types that form part of this type have a denotation. * Called on inferred (result) types of ValDefs and DefDefs. @@ -1215,45 +1143,37 @@ trait Checking { * of the self type, yet is no longer visible once the `this` has been replaced * by some other prefix. See neg/i3083.scala */ - def checkMembersOK(tp: Type, pos: SrcPos)(using Context): Type = { + def checkMembersOK(tp: Type, pos: SrcPos)(using Context): Type = var ok = true - val check: Type => Unit = { + val check: Type => Unit = case ref: NamedType => val d = try ref.denot catch { case ex: TypeError => NoDenotation } - if (!d.exists) { + if (!d.exists) report.error(em"$ref is not defined in inferred type $tp", pos) ok = false - } case _ => - } tp.foreachPart(check, StopAt.Static) if (ok) tp else UnspecifiedErrorType - } /** Check that all non-synthetic references of the form `` or * `this.` in `tree` that refer to a member of `badOwner` are * `allowed`. Also check that there are no other explicit `this` references * to `badOwner`. 
*/ - def checkRefsLegal(tree: tpd.Tree, badOwner: Symbol, allowed: (Name, Symbol) => Boolean, where: String)(using Context): Unit = { - val checker = new TreeTraverser { - def traverse(t: Tree)(using Context) = { + def checkRefsLegal(tree: tpd.Tree, badOwner: Symbol, allowed: (Name, Symbol) => Boolean, where: String)(using Context): Unit = + val checker = new TreeTraverser: + def traverse(t: Tree)(using Context) = def check(owner: Symbol, checkedSym: Symbol) = if (t.span.isSourceDerived && owner == badOwner) - t match { + t match case t: RefTree if allowed(t.name, checkedSym) => case _ => report.error(em"illegal reference to $checkedSym from $where", t.srcPos) - } val sym = t.symbol - t match { + t match case Ident(_) | Select(This(_), _) => check(sym.maybeOwner, sym) case This(_) => check(sym, sym) case _ => traverseChildren(t) - } - } - } checker.traverse(tree) - } /** Check that user-defined (result) type is fully applied */ def checkFullyAppliedType(tree: Tree)(using Context): Unit = tree match @@ -1312,7 +1232,7 @@ trait Checking { * 3. Check that only a static `enum` base class can extend java.lang.Enum. * 4. Check that user does not implement an `ordinal` method in the body of an enum class. */ - def checkEnum(cdef: untpd.TypeDef, cls: Symbol, firstParent: Symbol)(using Context): Unit = { + def checkEnum(cdef: untpd.TypeDef, cls: Symbol, firstParent: Symbol)(using Context): Unit = def existingDef(sym: Symbol, clazz: ClassSymbol)(using Context): Symbol = // adapted from SyntheticMembers val existing = sym.matchingMember(clazz.thisType) if existing != sym && !existing.is(Deferred) then existing else NoSymbol @@ -1344,7 +1264,6 @@ trait Checking { report.error(ClassCannotExtendEnum(cls, firstParent), cdef.srcPos) if cls.isEnumClass && !isJavaEnum then checkExistingOrdinal - } /** Check that the firstParent for an enum case derives from the declaring enum class, if not, adds it as a parent * after emitting an error. 
@@ -1380,51 +1299,44 @@ trait Checking { * @param cdef the enum companion object class * @param enumCtx the context immediately enclosing the corresponding enum */ - def checkEnumCaseRefsLegal(cdef: TypeDef, enumCtx: Context)(using Context): Unit = { + def checkEnumCaseRefsLegal(cdef: TypeDef, enumCtx: Context)(using Context): Unit = - def checkEnumCaseOrDefault(stat: Tree, caseCtx: Context) = { + def checkEnumCaseOrDefault(stat: Tree, caseCtx: Context) = - def check(tree: Tree) = { + def check(tree: Tree) = // allow access to `sym` if a typedIdent just outside the enclosing enum // would have produced the same symbol without errors - def allowAccess(name: Name, sym: Symbol): Boolean = { + def allowAccess(name: Name, sym: Symbol): Boolean = val testCtx = caseCtx.fresh.setNewTyperState() val ref = ctx.typer.typedIdent(untpd.Ident(name).withSpan(stat.span), WildcardType)(using testCtx) ref.symbol == sym && !testCtx.reporter.hasErrors - } checkRefsLegal(tree, cdef.symbol, allowAccess, "enum case") - } if (stat.symbol.isAllOf(EnumCase)) - stat match { + stat match case TypeDef(_, impl @ Template(DefDef(_, paramss, _, _), _, _, _)) => paramss.foreach(_.foreach(check)) impl.parents.foreach(check) case vdef: ValDef => - vdef.rhs match { + vdef.rhs match case Block((clsDef @ TypeDef(_, impl: Template)) :: Nil, _) if clsDef.symbol.isAnonymousClass => impl.parents.foreach(check) case _ => - } case _ => - } else if (stat.symbol.is(Module) && stat.symbol.linkedClass.isAllOf(EnumCase)) - stat match { + stat match case TypeDef(_, impl: Template) => for (case (defaultGetter @ DefDef(DefaultGetterName(nme.CONSTRUCTOR, _), _, _, _)) <- impl.body) check(defaultGetter.rhs) case _ => - } - } - cdef.rhs match { + cdef.rhs match case impl: Template => - def isEnumCase(stat: Tree) = stat match { + def isEnumCase(stat: Tree) = stat match case _: ValDef | _: TypeDef => stat.symbol.isAllOf(EnumCase) case _ => false - } val cases = for (stat <- impl.body if isEnumCase(stat)) yield 
untpd.ImportSelector(untpd.Ident(stat.symbol.name.toTermName)) @@ -1432,23 +1344,19 @@ trait Checking { val caseCtx = enumCtx.importContext(caseImport, caseImport.symbol) for (stat <- impl.body) checkEnumCaseOrDefault(stat, caseCtx) case _ => - } - } /** check that annotation `annot` is applicable to symbol `sym` */ def checkAnnotApplicable(annot: Tree, sym: Symbol)(using Context): Boolean = - !ctx.reporter.reportsErrorsFor { + !ctx.reporter.reportsErrorsFor: val annotCls = Annotations.annotClass(annot) val concreteAnnot = Annotations.ConcreteAnnotation(annot) val pos = annot.srcPos - if (annotCls == defn.MainAnnot || concreteAnnot.matches(defn.MainAnnotationClass)) { + if (annotCls == defn.MainAnnot || concreteAnnot.matches(defn.MainAnnotationClass)) if (!sym.isRealMethod) report.error(em"main annotation cannot be applied to $sym", pos) if (!sym.owner.is(Module) || !sym.owner.isStatic) report.error(em"$sym cannot be a main method since it cannot be accessed statically", pos) - } // TODO: Add more checks here - } /** Check that symbol's external name does not clash with symbols defined in the same scope */ def checkNoTargetNameConflict(stats: List[Tree])(using Context): Unit = @@ -1521,9 +1429,8 @@ trait Checking { for sel <- selectors do if !sel.isWildcard then checkIdent(sel) end checkImportSelectors -} -trait ReChecking extends Checking { +trait ReChecking extends Checking: import tpd._ override def checkEnumParent(cls: Symbol, firstParent: Symbol)(using Context): Unit = () override def checkEnum(cdef: untpd.TypeDef, cls: Symbol, firstParent: Symbol)(using Context): Unit = () @@ -1537,9 +1444,8 @@ trait ReChecking extends Checking { override def checkCatch(pat: Tree, guard: Tree)(using Context): Unit = () override def checkNoContextFunctionType(tree: Tree)(using Context): Unit = () override def checkFeature(name: TermName, description: => String, featureUseSite: Symbol, pos: SrcPos)(using Context): Unit = () -} -trait NoChecking extends ReChecking { +trait 
NoChecking extends ReChecking: import tpd._ override def checkNonCyclic(sym: Symbol, info: TypeBounds, reportErrors: Boolean)(using Context): Type = info override def checkNonCyclicInherited(joint: Type, parents: List[Type], decls: Scope, pos: SrcPos)(using Context): Unit = () @@ -1560,4 +1466,3 @@ trait NoChecking extends ReChecking { override def checkInInlineContext(what: String, pos: SrcPos)(using Context): Unit = () override def checkValidInfix(tree: untpd.InfixOp, meth: Symbol)(using Context): Unit = () override def checkImportSelectors(qualType: Type, selectors: List[untpd.ImportSelector])(using Context): Unit = () -} diff --git a/compiler/src/dotty/tools/dotc/typer/ConstFold.scala b/compiler/src/dotty/tools/dotc/typer/ConstFold.scala index b55c8c64e3b1..1c670bf2b5f3 100644 --- a/compiler/src/dotty/tools/dotc/typer/ConstFold.scala +++ b/compiler/src/dotty/tools/dotc/typer/ConstFold.scala @@ -77,7 +77,7 @@ object ConstFold: private def withFoldedType(c: Constant | Null): T = if c == null then tree else tree.withType(ConstantType(c)).asInstanceOf[T] - private def foldUnop(op: Name, x: Constant): Constant | Null = (op, x.tag) match { + private def foldUnop(op: Name, x: Constant): Constant | Null = (op, x.tag) match case (nme.UNARY_!, BooleanTag) => Constant(!x.booleanValue) case (nme.UNARY_~ , IntTag ) => Constant(~x.intValue) @@ -100,12 +100,11 @@ object ConstFold: case (nme.toDouble, _ ) if x.isNumeric => Constant(x.doubleValue) case _ => null - } /** These are local helpers to keep foldBinop from overly taxing the * optimizer. 
*/ - private def foldBooleanOp(op: Name, x: Constant, y: Constant): Constant | Null = op match { + private def foldBooleanOp(op: Name, x: Constant, y: Constant): Constant | Null = op match case nme.ZOR => Constant(x.booleanValue | y.booleanValue) case nme.OR => Constant(x.booleanValue | y.booleanValue) case nme.XOR => Constant(x.booleanValue ^ y.booleanValue) @@ -114,8 +113,7 @@ object ConstFold: case nme.EQ => Constant(x.booleanValue == y.booleanValue) case nme.NE => Constant(x.booleanValue != y.booleanValue) case _ => null - } - private def foldSubrangeOp(op: Name, x: Constant, y: Constant): Constant | Null = op match { + private def foldSubrangeOp(op: Name, x: Constant, y: Constant): Constant | Null = op match case nme.OR => Constant(x.intValue | y.intValue) case nme.XOR => Constant(x.intValue ^ y.intValue) case nme.AND => Constant(x.intValue & y.intValue) @@ -134,8 +132,7 @@ object ConstFold: case nme.DIV => Constant(x.intValue / y.intValue) case nme.MOD => Constant(x.intValue % y.intValue) case _ => null - } - private def foldLongOp(op: Name, x: Constant, y: Constant): Constant | Null = op match { + private def foldLongOp(op: Name, x: Constant, y: Constant): Constant | Null = op match case nme.OR => Constant(x.longValue | y.longValue) case nme.XOR => Constant(x.longValue ^ y.longValue) case nme.AND => Constant(x.longValue & y.longValue) @@ -154,8 +151,7 @@ object ConstFold: case nme.DIV => Constant(x.longValue / y.longValue) case nme.MOD => Constant(x.longValue % y.longValue) case _ => null - } - private def foldFloatOp(op: Name, x: Constant, y: Constant): Constant | Null = op match { + private def foldFloatOp(op: Name, x: Constant, y: Constant): Constant | Null = op match case nme.EQ => Constant(x.floatValue == y.floatValue) case nme.NE => Constant(x.floatValue != y.floatValue) case nme.LT => Constant(x.floatValue < y.floatValue) @@ -168,8 +164,7 @@ object ConstFold: case nme.DIV => Constant(x.floatValue / y.floatValue) case nme.MOD => Constant(x.floatValue % 
y.floatValue) case _ => null - } - private def foldDoubleOp(op: Name, x: Constant, y: Constant): Constant | Null = op match { + private def foldDoubleOp(op: Name, x: Constant, y: Constant): Constant | Null = op match case nme.EQ => Constant(x.doubleValue == y.doubleValue) case nme.NE => Constant(x.doubleValue != y.doubleValue) case nme.LT => Constant(x.doubleValue < y.doubleValue) @@ -182,13 +177,11 @@ object ConstFold: case nme.DIV => Constant(x.doubleValue / y.doubleValue) case nme.MOD => Constant(x.doubleValue % y.doubleValue) case _ => null - } - private def foldStringOp(op: Name, x: Constant, y: Constant): Constant | Null = op match { + private def foldStringOp(op: Name, x: Constant, y: Constant): Constant | Null = op match case nme.ADD => Constant(x.stringValue + y.stringValue) case nme.EQ => Constant(x.stringValue == y.stringValue) case nme.NE => Constant(x.stringValue != y.stringValue) case _ => null - } private def foldNullOp(op: Name, x: Constant, y: Constant): Constant | Null= assert(x.tag == NullTag || y.tag == NullTag) @@ -213,7 +206,7 @@ object ConstFold: case StringTag => foldStringOp(op, x, y) case NullTag => foldNullOp(op, x, y) case _ => null - catch case ex: ArithmeticException => null // the code will crash at runtime, + catch case ex: ArithmeticException => null // the code will crash at runtime, // but that is better than the // compiler itself crashing end foldBinop diff --git a/compiler/src/dotty/tools/dotc/typer/CrossVersionChecks.scala b/compiler/src/dotty/tools/dotc/typer/CrossVersionChecks.scala index 4087c5faf404..4546c2602ac5 100644 --- a/compiler/src/dotty/tools/dotc/typer/CrossVersionChecks.scala +++ b/compiler/src/dotty/tools/dotc/typer/CrossVersionChecks.scala @@ -93,9 +93,9 @@ class CrossVersionChecks extends MiniPhase: * concrete, non-deprecated method. If it does, then * deprecation is meaningless. 
*/ - private def checkDeprecatedOvers(tree: Tree)(using Context): Unit = { + private def checkDeprecatedOvers(tree: Tree)(using Context): Unit = val symbol = tree.symbol - if (symbol.isDeprecated) { + if (symbol.isDeprecated) val concrOvers = symbol.allOverriddenSymbols.filter(sym => !sym.isDeprecated && !sym.is(Deferred)) @@ -104,8 +104,6 @@ class CrossVersionChecks extends MiniPhase: em"""$symbol overrides concrete, non-deprecated definition(s): | ${concrOvers.map(_.name).mkString(", ")}""", tree.srcPos) - } - } override def transformValDef(tree: ValDef)(using Context): ValDef = checkDeprecatedOvers(tree) @@ -122,24 +120,21 @@ class CrossVersionChecks extends MiniPhase: checkExperimentalAnnots(tree.symbol) tree - override def transformIdent(tree: Ident)(using Context): Ident = { + override def transformIdent(tree: Ident)(using Context): Ident = checkUndesiredProperties(tree.symbol, tree.srcPos) tree - } - override def transformSelect(tree: Select)(using Context): Select = { + override def transformSelect(tree: Select)(using Context): Select = checkUndesiredProperties(tree.symbol, tree.srcPos) tree - } - override def transformNew(tree: New)(using Context): New = { + override def transformNew(tree: New)(using Context): New = checkUndesiredProperties(tree.tpe.typeSymbol, tree.srcPos) tree - } - override def transformTypeTree(tree: TypeTree)(using Context): TypeTree = { + override def transformTypeTree(tree: TypeTree)(using Context): TypeTree = val tpe = tree.tpe - tpe.foreachPart { + tpe.foreachPart: case TypeRef(_, sym: Symbol) => checkDeprecated(sym, tree.srcPos) checkExperimentalRef(sym, tree.srcPos) @@ -147,17 +142,14 @@ class CrossVersionChecks extends MiniPhase: checkDeprecated(sym, tree.srcPos) checkExperimentalRef(sym, tree.srcPos) case _ => - } tree - } override def transformOther(tree: Tree)(using Context): Tree = - tree.foreachSubTree { // Find references in type trees and imports + tree.foreachSubTree: // Find references in type trees and imports case 
tree: Ident => transformIdent(tree) case tree: Select => transformSelect(tree) case tree: TypeTree => transformTypeTree(tree) case _ => - } tree end CrossVersionChecks diff --git a/compiler/src/dotty/tools/dotc/typer/Deriving.scala b/compiler/src/dotty/tools/dotc/typer/Deriving.scala index 8fdc468780ba..3ce49f885049 100644 --- a/compiler/src/dotty/tools/dotc/typer/Deriving.scala +++ b/compiler/src/dotty/tools/dotc/typer/Deriving.scala @@ -14,7 +14,7 @@ import collection.mutable import ErrorReporting.errorTree /** A typer mixin that implements type class derivation functionality */ -trait Deriving { +trait Deriving: this: Typer => /** A helper class to derive type class instances for one class or object @@ -23,25 +23,24 @@ trait Deriving { * synthesized infrastructure code that is not connected with a * `derives` instance. */ - class Deriver(cls: ClassSymbol, codePos: SrcPos)(using Context) { + class Deriver(cls: ClassSymbol, codePos: SrcPos)(using Context): /** A buffer for synthesized symbols for type class instances */ private var synthetics = new mutable.ListBuffer[Symbol] /** A version of Type#underlyingClassRef that works also for higher-kinded types */ - private def underlyingClassRef(tp: Type): Type = tp match { + private def underlyingClassRef(tp: Type): Type = tp match case tp: TypeRef if tp.symbol.isClass => tp case tp: TypeRef if tp.symbol.isAbstractType => NoType case tp: TermRef => NoType case tp: TypeProxy => underlyingClassRef(tp.superType) case _ => NoType - } /** Enter type class instance with given name and info in current scope, provided * an instance with the same name does not exist already. 
* @param reportErrors Report an error if an instance with the same name exists already */ - private def addDerivedInstance(clsName: Name, info: Type, pos: SrcPos): Unit = { + private def addDerivedInstance(clsName: Name, info: Type, pos: SrcPos): Unit = val instanceName = "derived$".concat(clsName) if (ctx.denotNamed(instanceName).exists) report.error(em"duplicate type class derivation for $clsName", pos) @@ -53,7 +52,6 @@ trait Deriving { synthetics += newSymbol(ctx.owner, instanceName, flags, info, coord = pos.span) .entered - } /** Check derived type tree `derived` for the following well-formedness conditions: * (1) It must be a class type with a stable prefix (@see checkClassTypeWithStablePrefix) @@ -76,7 +74,7 @@ trait Deriving { * the underlying class name. This allows one to disambiguate derivations of type classes * that have the same name but different prefixes through selective aliasing. */ - private def processDerivedInstance(derived: untpd.Tree): Unit = { + private def processDerivedInstance(derived: untpd.Tree): Unit = val originalTypeClassType = typedAheadType(derived, AnyTypeConstructorProto).tpe val underlyingClassType = underlyingClassRef(originalTypeClassType) val typeClassType = checkClassType( @@ -92,7 +90,7 @@ trait Deriving { def cannotBeUnified = report.error(em"${cls.name} cannot be unified with the type argument of ${typeClass.name}", derived.srcPos) - def addInstance(derivedParams: List[TypeSymbol], evidenceParamInfos: List[List[Type]], instanceTypes: List[Type]): Unit = { + def addInstance(derivedParams: List[TypeSymbol], evidenceParamInfos: List[List[Type]], instanceTypes: List[Type]): Unit = val resultType = typeClassType.appliedTo(instanceTypes) val monoInfo = if evidenceParamInfos.isEmpty then resultType @@ -101,9 +99,8 @@ trait Deriving { if derivedParams.isEmpty then monoInfo else PolyType.fromParams(derivedParams, monoInfo) addDerivedInstance(originalTypeClassType.typeSymbol.name, derivedInfo, derived.srcPos) - } - def 
deriveSingleParameter: Unit = { + def deriveSingleParameter: Unit = // Single parameter type classes ... (a) and (b) above // // (a) ADT and type class parameters overlap on the right and have the @@ -161,32 +158,28 @@ trait Deriving { val alignedTypeClassParamInfos = typeClassParamInfos.takeRight(alignedClsParamInfos.length) - if ((instanceArity == clsArity || instanceArity > 0) && sameParamKinds(alignedClsParamInfos, alignedTypeClassParamInfos)) { + if ((instanceArity == clsArity || instanceArity > 0) && sameParamKinds(alignedClsParamInfos, alignedTypeClassParamInfos)) // case (a) ... see description above val derivedParams = clsParams.dropRight(instanceArity) val instanceType = if (instanceArity == clsArity) clsType.EtaExpand(clsParams) - else { + else val derivedParamTypes = derivedParams.map(_.typeRef) HKTypeLambda(typeClassParamInfos.map(_.paramName))( tl => typeClassParamInfos.map(_.paramInfo.bounds), tl => clsType.appliedTo(derivedParamTypes ++ tl.paramRefs.takeRight(clsArity))) - } addInstance(derivedParams, Nil, List(instanceType)) - } - else if (instanceArity == 0 && !clsParams.exists(_.info.isLambdaSub)) { + else if (instanceArity == 0 && !clsParams.exists(_.info.isLambdaSub)) // case (b) ... see description above val instanceType = clsType.appliedTo(clsParams.map(_.typeRef)) val evidenceParamInfos = clsParams.map(param => List(param.typeRef)) addInstance(clsParams, evidenceParamInfos, List(instanceType)) - } else cannotBeUnified - } - def deriveCanEqual: Unit = { + def deriveCanEqual: Unit = // Specific derives rules for the CanEqual type class ... (c) above // // This has been extracted from the earlier more general multi-parameter @@ -228,10 +221,9 @@ trait Deriving { } // Retain only rows with L/R params of kind * which CanEqual can be applied to. // No pairwise evidence will be required for params of other kinds. 
- val firstKindedParamss = clsParamss.filter { + val firstKindedParamss = clsParamss.filter: case param :: _ => !param.info.isLambdaSub case _ => false - } // The types of the required evidence parameters. In the running example: // CanEqual[T_L, T_R], CanEqual[U_L, U_R], CanEqual[V_L, V_R] @@ -247,7 +239,6 @@ trait Deriving { // CanEqual[A[T_L, U_L, V_L], A[T_R, U_R, V_R]] addInstance(clsParamss.flatten, evidenceParamInfos, instanceTypes) - } if (typeClassArity == 1) deriveSingleParameter else if (typeClass == defn.CanEqualClass) deriveCanEqual @@ -255,7 +246,6 @@ trait Deriving { report.error(em"type ${typeClass.name} in derives clause of ${cls.name} has no type parameters", derived.srcPos) else cannotBeUnified - } /** Create symbols for derived instances and infrastructure, * append them to `synthetics` buffer, and enter them into class scope. @@ -265,7 +255,7 @@ trait Deriving { derived.foreach(processDerivedInstance(_)) /** The synthesized type class instance definitions */ - def syntheticDefs: List[tpd.Tree] = { + def syntheticDefs: List[tpd.Tree] = import tpd._ /** The type class instance definition with symbol `sym` */ @@ -277,17 +267,15 @@ trait Deriving { val vparams = if (vparamRefss.isEmpty) Nil else vparamRefss.head.map(_.symbol.asTerm) tparams.foreach(ctx.enter(_)) vparams.foreach(ctx.enter(_)) - def instantiated(info: Type): Type = info match { + def instantiated(info: Type): Type = info match case info: PolyType => instantiated(info.instantiate(tparamTypes)) case info: MethodType => info.instantiate(vparams.map(_.termRef)) case info => info.widenExpr - } - def companionRef(tp: Type): TermRef = tp match { + def companionRef(tp: Type): TermRef = tp match case tp @ TypeRef(prefix, _) if tp.symbol.isClass => prefix.select(tp.symbol.companionModule).asInstanceOf[TermRef] case tp: TypeProxy => companionRef(tp.superType) - } val resultType = instantiated(sym.info) val companion = companionRef(resultType) val module = untpd.ref(companion).withSpan(sym.span) 
@@ -296,17 +284,12 @@ trait Deriving { else errorTree(rhs, em"$resultType cannot be derived since ${resultType.typeSymbol} has no companion object") end typeclassInstance - def syntheticDef(sym: Symbol): Tree = inContext(ctx.fresh.setOwner(sym).setNewScope) { + def syntheticDef(sym: Symbol): Tree = inContext(ctx.fresh.setOwner(sym).setNewScope): if sym.is(Method) then tpd.DefDef(sym.asTerm, typeclassInstance(sym)) else tpd.ValDef(sym.asTerm, typeclassInstance(sym)(Nil)) - } synthetics.map(syntheticDef).toList - } - def finalize(stat: tpd.TypeDef): tpd.Tree = { + def finalize(stat: tpd.TypeDef): tpd.Tree = val templ @ Template(_, _, _, _) = stat.rhs: @unchecked tpd.cpy.TypeDef(stat)(rhs = tpd.cpy.Template(templ)(body = templ.body ++ syntheticDefs)) - } - } -} diff --git a/compiler/src/dotty/tools/dotc/typer/Docstrings.scala b/compiler/src/dotty/tools/dotc/typer/Docstrings.scala index d819528ff556..d0cea2aa3ca1 100644 --- a/compiler/src/dotty/tools/dotc/typer/Docstrings.scala +++ b/compiler/src/dotty/tools/dotc/typer/Docstrings.scala @@ -6,7 +6,7 @@ import core._ import Contexts._, Symbols._, Decorators._, Comments.{_, given} import ast.tpd -object Docstrings { +object Docstrings: /** * Expands or cooks the documentation for `sym` in class `owner`. 
@@ -26,35 +26,32 @@ object Docstrings { } private def expand(sym: Symbol, owner: Symbol)(using Context)(using docCtx: ContextDocstrings): Option[Comment] = - docCtx.docstring(sym).flatMap { + docCtx.docstring(sym).flatMap: case cmt if cmt.isExpanded => Some(cmt) case _ => expandComment(sym).map { expanded => val typedUsecases = expanded.usecases.map { usecase => ctx.typer.enterSymbol(ctx.typer.createSymbol(usecase.untpdCode)) - ctx.typer.typedStats(usecase.untpdCode :: Nil, owner)._1 match { + ctx.typer.typedStats(usecase.untpdCode :: Nil, owner)._1 match case List(df: tpd.DefDef) => usecase.typed(df) case _ => report.error(em"`@usecase` was not a valid definition", ctx.source.atSpan(usecase.codePos)) usecase - } } val commentWithUsecases = expanded.copy(usecases = typedUsecases) docCtx.addDocstring(sym, Some(commentWithUsecases)) commentWithUsecases } - } - private def expandComment(sym: Symbol, owner: Symbol, comment: Comment)(using Context)(using docCtx: ContextDocstrings): Comment = { + private def expandComment(sym: Symbol, owner: Symbol, comment: Comment)(using Context)(using docCtx: ContextDocstrings): Comment = val tplExp = docCtx.templateExpander tplExp.defineVariables(sym) val newComment = comment.expand(tplExp.expandedDocComment(sym, owner, _)) docCtx.addDocstring(sym, Some(newComment)) newComment - } private def expandComment(sym: Symbol)(using Context)(using docCtx: ContextDocstrings): Option[Comment] = if (sym eq NoSymbol) None @@ -64,4 +61,3 @@ object Docstrings { _ = expandComment(sym.owner) } yield expandComment(sym, sym.owner, cmt) -} diff --git a/compiler/src/dotty/tools/dotc/typer/Dynamic.scala b/compiler/src/dotty/tools/dotc/typer/Dynamic.scala index 717966923708..b3fe897c7d15 100644 --- a/compiler/src/dotty/tools/dotc/typer/Dynamic.scala +++ b/compiler/src/dotty/tools/dotc/typer/Dynamic.scala @@ -19,7 +19,7 @@ import dotty.tools.dotc.transform.ValueClasses import dotty.tools.dotc.transform.TypeUtils.isPrimitiveValueType import reporting.* 
-object Dynamic { +object Dynamic: private def isDynamicMethod(name: Name): Boolean = name == nme.applyDynamic || name == nme.selectDynamic || name == nme.updateDynamic || name == nme.applyDynamicNamed @@ -35,9 +35,8 @@ object Dynamic { case Select(qual, nme.apply) => isDynamicMethod(qual.symbol.name) && tree.span.isSynthetic case _ => false -} -object DynamicUnapply { +object DynamicUnapply: def unapply(tree: tpd.Tree): Option[List[tpd.Tree]] = tree match case TypeApply(Select(qual, name), _) if name == nme.asInstanceOfPM => unapply(qual) @@ -46,7 +45,6 @@ object DynamicUnapply { Some(selectable :: ctag :: implicits) case _ => None -} /** Handles programmable member selections of `Dynamic` instances and values * with structural types. Two functionalities: @@ -64,7 +62,7 @@ object DynamicUnapply { * or `applyDynamic` on a `Selectable` instance. @See handleStructural. * */ -trait Dynamic { +trait Dynamic: self: Typer & Applications => import Dynamic._ @@ -76,42 +74,35 @@ trait Dynamic { * foo.bar(x = bazX, y = bazY, baz, ...) ~~> foo.applyDynamicNamed("bar")(("x", bazX), ("y", bazY), ("", baz), ...) * foo.bar[T0, ...](x = bazX, y = bazY, baz, ...) ~~> foo.applyDynamicNamed[T0, ...]("bar")(("x", bazX), ("y", bazY), ("", baz), ...) 
*/ - def typedDynamicApply(tree: untpd.Apply, isInsertedApply: Boolean, pt: Type)(using Context): Tree = { - def typedDynamicApply(qual: untpd.Tree, name: Name, selSpan: Span, targs: List[untpd.Tree]): Tree = { + def typedDynamicApply(tree: untpd.Apply, isInsertedApply: Boolean, pt: Type)(using Context): Tree = + def typedDynamicApply(qual: untpd.Tree, name: Name, selSpan: Span, targs: List[untpd.Tree]): Tree = def isNamedArg(arg: untpd.Tree): Boolean = arg match { case NamedArg(_, _) => true; case _ => false } val args = tree.args val dynName = if (args.exists(isNamedArg)) nme.applyDynamicNamed else nme.applyDynamic if (dynName == nme.applyDynamicNamed && untpd.isWildcardStarArgList(args)) errorTree(tree, em"applyDynamicNamed does not support passing a vararg parameter") - else { + else def namedArgTuple(name: String, arg: untpd.Tree) = untpd.Tuple(List(Literal(Constant(name)), arg)) - def namedArgs = args.map { + def namedArgs = args.map: case NamedArg(argName, arg) => namedArgTuple(argName.toString, arg) case arg => namedArgTuple("", arg) - } val args1 = if (dynName == nme.applyDynamic) args else namedArgs typedApply(untpd.Apply(coreDynamic(qual, dynName, name, selSpan, targs), args1), pt) - } - } - if (isInsertedApply) { - tree.fun match { + if (isInsertedApply) + tree.fun match case TypeApply(fun, targs) => typedDynamicApply(fun, nme.apply, fun.span, targs) case fun => typedDynamicApply(fun, nme.apply, fun.span, Nil) - } - } else { - tree.fun match { + else + tree.fun match case sel @ Select(qual, name) if !isDynamicMethod(name) => typedDynamicApply(qual, name, sel.span, Nil) case TypeApply(sel @ Select(qual, name), targs) if !isDynamicMethod(name) => typedDynamicApply(qual, name, sel.span, targs) case _ => errorTree(tree, em"Dynamic insertion not applicable") - } - } - } /** Translate selection that does not typecheck according to the normal rules into a selectDynamic. 
* foo.bar ~~> foo.selectDynamic(bar) @@ -126,26 +117,23 @@ trait Dynamic { /** Translate selection that does not typecheck according to the normal rules into a updateDynamic. * foo.bar = baz ~~> foo.updateDynamic(bar)(baz) */ - def typedDynamicAssign(tree: untpd.Assign, pt: Type)(using Context): Tree = { + def typedDynamicAssign(tree: untpd.Assign, pt: Type)(using Context): Tree = def typedDynamicAssign(qual: untpd.Tree, name: Name, selSpan: Span, targs: List[untpd.Tree]): Tree = typedApply(untpd.Apply(coreDynamic(qual, nme.updateDynamic, name, selSpan, targs), tree.rhs), pt) - tree.lhs match { + tree.lhs match case sel @ Select(qual, name) if !isDynamicMethod(name) => typedDynamicAssign(qual, name, sel.span, Nil) case TypeApply(sel @ Select(qual, name), targs) if !isDynamicMethod(name) => typedDynamicAssign(qual, name, sel.span, targs) case _ => errorTree(tree, ReassignmentToVal(tree.lhs.symbol.name)) - } - } - private def coreDynamic(qual: untpd.Tree, dynName: Name, name: Name, selSpan: Span, targs: List[untpd.Tree])(using Context): untpd.Apply = { + private def coreDynamic(qual: untpd.Tree, dynName: Name, name: Name, selSpan: Span, targs: List[untpd.Tree])(using Context): untpd.Apply = val select = untpd.Select(qual, dynName).withSpan(selSpan) val selectWithTypes = if (targs.isEmpty) select else untpd.TypeApply(select, targs) untpd.Apply(selectWithTypes, Literal(Constant(name.toString))) - } /** Handle reflection-based dispatch for members of structural types. 
* @@ -176,11 +164,11 @@ trait Dynamic { * It's an error if U is neither a value nor a method type, or a dependent method * type */ - def handleStructural(tree: Tree)(using Context): Tree = { + def handleStructural(tree: Tree)(using Context): Tree = val fun @ Select(qual, name) = funPart(tree): @unchecked val vargss = termArgss(tree) - def structuralCall(selectorName: TermName, classOfs: => List[Tree]) = { + def structuralCall(selectorName: TermName, classOfs: => List[Tree]) = val selectable = adapt(qual, defn.SelectableClass.typeRef | defn.DynamicClass.typeRef) // ($qual: Selectable).$selectorName("$name") @@ -201,17 +189,16 @@ trait Dynamic { case Apply(fn @ Select(_, nme.applyDynamic), nameArg :: _ :: Nil) => fn.tpe.widen match case mt: MethodType => mt.paramInfos match - case _ :: classOfsParam :: Nil - if classOfsParam.isRepeatedParam - && classOfsParam.argInfos.head.isRef(defn.ClassClass) => + case _ :: classOfsParam :: Nil + if classOfsParam.isRepeatedParam + && classOfsParam.argInfos.head.isRef(defn.ClassClass) => val jlClassType = defn.ClassClass.typeRef.appliedTo(TypeBounds.empty) cpy.Apply(tree)(fn, nameArg :: seqToRepeated(SeqLiteral(classOfs, TypeTree(jlClassType))) :: Nil) - case _ => tree + case _ => tree case other => tree case _ => tree addClassOfs(typed(scall)) - } def fail(reason: String): Tree = errorTree(tree, em"Structural access not allowed on method $name because it $reason") @@ -240,36 +227,31 @@ trait Dynamic { tree maybeBoxed.cast(tpe) - fun.tpe.widen match { + fun.tpe.widen match case tpe: ValueType => structuralCall(nme.selectDynamic, Nil).maybeBoxingCast(tpe) case tpe: MethodType => - def isDependentMethod(tpe: Type): Boolean = tpe match { + def isDependentMethod(tpe: Type): Boolean = tpe match case tpe: MethodType => tpe.isParamDependent || tpe.isResultDependent || isDependentMethod(tpe.resultType) case _ => false - } if (isDependentMethod(tpe)) fail(i"has a method type with inter-parameter dependencies") - else { + else def classOfs = 
if tpe.paramInfoss.nestedExists(!TypeErasure.hasStableErasure(_)) then fail(i"has a parameter type with an unstable erasure") :: Nil else TypeErasure.erasure(tpe).asInstanceOf[MethodType].paramInfos.map(clsOf(_)) structuralCall(nme.applyDynamic, classOfs).maybeBoxingCast(tpe.finalResultType) - } // (@allanrenucci) I think everything below is dead code case _: PolyType => fail("is polymorphic") case tpe => fail(i"has an unsupported type: $tpe") - } - } -} diff --git a/compiler/src/dotty/tools/dotc/typer/ErrorReporting.scala b/compiler/src/dotty/tools/dotc/typer/ErrorReporting.scala index 126d109889e1..2371d2959e64 100644 --- a/compiler/src/dotty/tools/dotc/typer/ErrorReporting.scala +++ b/compiler/src/dotty/tools/dotc/typer/ErrorReporting.scala @@ -14,7 +14,7 @@ import reporting._ import collection.mutable -object ErrorReporting { +object ErrorReporting: import tpd._ @@ -27,15 +27,13 @@ object ErrorReporting { def errorTree(tree: untpd.Tree, msg: TypeError, pos: SrcPos)(using Context): tpd.Tree = tree.withType(errorType(msg, pos)) - def errorType(msg: Message, pos: SrcPos)(using Context): ErrorType = { + def errorType(msg: Message, pos: SrcPos)(using Context): ErrorType = report.error(msg, pos) ErrorType(msg) - } - def errorType(ex: TypeError, pos: SrcPos)(using Context): ErrorType = { + def errorType(ex: TypeError, pos: SrcPos)(using Context): ErrorType = report.error(ex, pos) ErrorType(ex.toMessage) - } def wrongNumberOfTypeArgs(fntpe: Type, expectedArgs: List[ParamInfo], actual: List[untpd.Tree], pos: SrcPos)(using Context): ErrorType = errorType(WrongNumberOfTypeArgs(fntpe, expectedArgs, actual), pos) @@ -70,7 +68,7 @@ object ErrorReporting { case _ => foldOver(s, tp) tps.foldLeft("")(collectMatchTrace) - class Errors(using Context) { + class Errors(using Context): /** An explanatory note to be added to error messages * when there's a problem with abstract var defs */ @@ -89,30 +87,26 @@ object ErrorReporting { case tp @ IgnoredProto(deepTp: FunProto) if 
tp.wasDeepened => deepTp case _ => tp - def expectedTypeStr(tp: Type): String = tp match { + def expectedTypeStr(tp: Type): String = tp match case tp: PolyProto => i"type arguments [${tp.targs.tpes}%, %] and ${expectedTypeStr(revealDeepenedArgs(tp.resultType))}" case tp: FunProto => def argStr(tp: FunProto): String = - val result = revealDeepenedArgs(tp.resultType) match { + val result = revealDeepenedArgs(tp.resultType) match case restp: FunProto => argStr(restp) case _: WildcardType | _: IgnoredProto => "" case tp => i" and expected result type $tp" - } i"(${tp.typedArgs().tpes}%, %)$result" s"arguments ${argStr(tp)}" case _ => i"expected type $tp" - } - def anonymousTypeMemberStr(tpe: Type): String = { - val kind = tpe match { + def anonymousTypeMemberStr(tpe: Type): String = + val kind = tpe match case _: TypeBounds => "type with bounds" case _: MethodOrPoly => "method" case _ => "value of type" - } i"$kind $tpe" - } def overloadedAltsStr(alts: List[SingleDenotation]): String = i"""overloaded alternatives of ${denotStr(alts.head)} with types @@ -123,7 +117,7 @@ object ErrorReporting { else if (denot.symbol.exists) denot.symbol.showLocated else anonymousTypeMemberStr(denot.info) - def refStr(tp: Type): String = tp match { + def refStr(tp: Type): String = tp match case tp: NamedType => if tp.denot.symbol.exists then tp.denot.symbol.showLocated else @@ -132,7 +126,6 @@ object ErrorReporting { case _ => if tp.isType then "type" else "value" s"$kind ${tp.name}" case _ => anonymousTypeMemberStr(tp) - } /** Explain info of symbol `sym` as a member of class `base`. * @param showLocation if true also show sym's location. @@ -156,13 +149,12 @@ object ErrorReporting { def takesNoParamsMsg(tree: Tree, kind: String): Message = if (tree.tpe.widen.exists) em"${exprStr(tree)} does not take ${kind}parameters" - else { + else em"undefined: $tree # ${tree.uniqueId}: ${tree.tpe.toString} at ${ctx.phase}" - } def patternConstrStr(tree: Tree): String = ??? 
- def typeMismatch(tree: Tree, pt: Type, implicitFailure: SearchFailureType = NoMatchingImplicits): Tree = { + def typeMismatch(tree: Tree, pt: Type, implicitFailure: SearchFailureType = NoMatchingImplicits): Tree = val normTp = normalize(tree.tpe, pt) val normPt = normalize(pt, pt) @@ -185,7 +177,6 @@ object ErrorReporting { case _ => "" errorTree(tree, TypeMismatch(treeTp, expectedTp, Some(tree), implicitFailure.whyNoConversion, missingElse)) - } /** A subtype log explaining why `found` does not conform to `expected` */ def whyNoMatchStr(found: Type, expected: Type): String = @@ -270,7 +261,6 @@ object ErrorReporting { if add.isEmpty then "" else ", but could be made available as an extension method." ++ add end selectErrorAddendum - } def substitutableTypeSymbolsInScope(sym: Symbol)(using Context): List[Symbol] = sym.ownersIterator.takeWhile(!_.is(Flags.Package)).flatMap { ownerSym => @@ -283,4 +273,3 @@ object ErrorReporting { |they must be enabled with a `experimental.dependent` language import or setting""".stripMargin.toMessage def err(using Context): Errors = new Errors -} diff --git a/compiler/src/dotty/tools/dotc/typer/EtaExpansion.scala b/compiler/src/dotty/tools/dotc/typer/EtaExpansion.scala index b1513df777ec..e8df7f0e36fc 100644 --- a/compiler/src/dotty/tools/dotc/typer/EtaExpansion.scala +++ b/compiler/src/dotty/tools/dotc/typer/EtaExpansion.scala @@ -24,7 +24,7 @@ import Trees._ * default arguments, where we lift also complex pure expressions, since in that case * arguments can be duplicated as arguments to default argument methods. 
*/ -abstract class Lifter { +abstract class Lifter: import tpd._ /** Test indicating `expr` does not need lifting */ @@ -44,7 +44,7 @@ abstract class Lifter { private def lift(defs: mutable.ListBuffer[Tree], expr: Tree, prefix: TermName = EmptyTermName)(using Context): Tree = if (noLift(expr)) expr - else { + else val name = UniqueName.fresh(prefix) // don't instantiate here, as the type params could be further constrained, see tests/pos/pickleinf.scala var liftedType = expr.tpe.widen.deskolemized @@ -58,13 +58,12 @@ abstract class Lifter { .changeNonLocalOwners(lifted) .setDefTree ref(lifted.termRef).withSpan(expr.span.focus) - } /** Lift out common part of lhs tree taking part in an operator assignment such as * * lhs += expr */ - def liftAssigned(defs: mutable.ListBuffer[Tree], tree: Tree)(using Context): Tree = tree match { + def liftAssigned(defs: mutable.ListBuffer[Tree], tree: Tree)(using Context): Tree = tree match case Apply(MaybePoly(fn @ Select(pre, name), targs), args) => cpy.Apply(tree)( cpy.Select(fn)( @@ -74,20 +73,18 @@ abstract class Lifter { cpy.Select(tree)(lift(defs, pre), name) case _ => tree - } /** Lift a function argument, stripping any NamedArg wrapper */ private def liftArg(defs: mutable.ListBuffer[Tree], arg: Tree, prefix: TermName = EmptyTermName)(using Context): Tree = - arg match { + arg match case arg @ NamedArg(name, arg1) => cpy.NamedArg(arg)(name, lift(defs, arg1, prefix)) case arg => lift(defs, arg, prefix) - } /** Lift arguments that are not-idempotent into ValDefs in buffer `defs` * and replace by the idents of so created ValDefs. 
*/ def liftArgs(defs: mutable.ListBuffer[Tree], methRef: Type, args: List[Tree])(using Context): List[Tree] = - methRef.widen match { + methRef.widen match case mt: MethodType => args.lazyZip(mt.paramNames).lazyZip(mt.paramInfos).map { (arg, name, tp) => if tp.hasAnnotation(defn.InlineParamAnnot) then arg @@ -97,7 +94,6 @@ abstract class Lifter { } case _ => args.mapConserve(liftArg(defs, _)) - } /** Lift out function prefix and all arguments from application * @@ -112,7 +108,7 @@ abstract class Lifter { * But leave pure expressions alone. * */ - def liftApp(defs: mutable.ListBuffer[Tree], tree: Tree)(using Context): Tree = tree match { + def liftApp(defs: mutable.ListBuffer[Tree], tree: Tree)(using Context): Tree = tree match case Apply(fn, args) => val fn1 = liftApp(defs, fn) val args1 = liftArgs(defs, fn.tpe, args) @@ -128,7 +124,6 @@ abstract class Lifter { tree case _ => lift(defs, tree) - } /** Lift prefix `pre` of an application `pre.f(...)` to * @@ -139,28 +134,24 @@ abstract class Lifter { */ def liftPrefix(defs: mutable.ListBuffer[Tree], tree: Tree)(using Context): Tree = if (isIdempotentExpr(tree)) tree else lift(defs, tree) -} /** No lifting at all */ -object NoLift extends Lifter { +object NoLift extends Lifter: def noLift(expr: tpd.Tree)(using Context): Boolean = true -} /** Lift all impure arguments */ -class LiftImpure extends Lifter { +class LiftImpure extends Lifter: def noLift(expr: tpd.Tree)(using Context): Boolean = tpd.isPureExpr(expr) -} object LiftImpure extends LiftImpure /** Lift all impure or complex arguments */ -class LiftComplex extends Lifter { +class LiftComplex extends Lifter: def noLift(expr: tpd.Tree)(using Context): Boolean = tpd.isPurePath(expr) override def exprLifter: Lifter = LiftToDefs -} object LiftComplex extends LiftComplex /** Lift impure + lift the prefixes */ -object LiftCoverage extends LiftImpure { +object LiftCoverage extends LiftImpure: // Property indicating whether we're currently lifting the arguments of an 
application private val LiftingArgs = new Property.Key[Boolean] @@ -189,24 +180,21 @@ object LiftCoverage extends LiftImpure { override def noLift(expr: tpd.Tree)(using Context) = if liftingArgs then noLiftArg(expr) else super.noLift(expr) - def liftForCoverage(defs: mutable.ListBuffer[tpd.Tree], tree: tpd.Apply)(using Context) = { + def liftForCoverage(defs: mutable.ListBuffer[tpd.Tree], tree: tpd.Apply)(using Context) = val liftedFun = liftApp(defs, tree.fun) val liftedArgs = liftArgs(defs, tree.fun.tpe, tree.args)(using liftingArgsContext) tpd.cpy.Apply(tree)(liftedFun, liftedArgs) - } -} object LiftErased extends LiftComplex: override def isErased = true /** Lift all impure or complex arguments to `def`s */ -object LiftToDefs extends LiftComplex { +object LiftToDefs extends LiftComplex: override def liftedFlags: FlagSet = Method override def liftedDef(sym: TermSymbol, rhs: tpd.Tree)(using Context): tpd.DefDef = tpd.DefDef(sym, rhs) -} /** Lifter for eta expansion */ -object EtaExpansion extends LiftImpure { +object EtaExpansion extends LiftImpure: import tpd._ /** Eta-expanding a tree means converting a method reference to a function value. @@ -262,15 +250,14 @@ object EtaExpansion extends LiftImpure { * be OK. After elimByName they are all converted to regular function types anyway. * But see comment on the `ExprType` case in function `prune` in class `ConstraintHandling`. 
*/ - def etaExpand(tree: Tree, mt: MethodType, xarity: Int)(using Context): untpd.Tree = { + def etaExpand(tree: Tree, mt: MethodType, xarity: Int)(using Context): untpd.Tree = import untpd._ assert(!ctx.isAfterTyper || (ctx.phase eq ctx.base.inliningPhase), ctx.phase) val defs = new mutable.ListBuffer[tpd.Tree] val lifted: Tree = TypedSplice(liftApp(defs, tree)) - val isLastApplication = mt.resultType match { + val isLastApplication = mt.resultType match case rt: MethodType => rt.isImplicitMethod case _ => true - } val paramTypes: List[Tree] = if (isLastApplication && mt.paramInfos.length == xarity) mt.paramInfos map (_ => TypeTree()) else mt.paramInfos map TypeTree @@ -290,5 +277,3 @@ object EtaExpansion extends LiftImpure { else if (mt.hasErasedParams) new untpd.FunctionWithMods(params, body, Modifiers(), mt.erasedParams) else untpd.Function(params, body) if (defs.nonEmpty) untpd.Block(defs.toList map (untpd.TypedSplice(_)), fn) else fn - } -} diff --git a/compiler/src/dotty/tools/dotc/typer/Implicits.scala b/compiler/src/dotty/tools/dotc/typer/Implicits.scala index c6795ed25a0e..0f5f11e070e3 100644 --- a/compiler/src/dotty/tools/dotc/typer/Implicits.scala +++ b/compiler/src/dotty/tools/dotc/typer/Implicits.scala @@ -44,30 +44,26 @@ object Implicits: /** An implicit definition `implicitRef` that is visible under a different name, `alias`. * Gets generated if an implicit ref is imported via a renaming import. */ - class RenamedImplicitRef(val underlyingRef: TermRef, val alias: TermName) extends ImplicitRef { + class RenamedImplicitRef(val underlyingRef: TermRef, val alias: TermName) extends ImplicitRef: def implicitName(using Context): TermName = alias - } /** Both search candidates and successes are references with a specific nesting level. 
*/ - sealed trait RefAndLevel { + sealed trait RefAndLevel: def ref: TermRef def level: Int - } /** An eligible implicit candidate, consisting of an implicit reference and a nesting level */ - case class Candidate(implicitRef: ImplicitRef, kind: Candidate.Kind, level: Int) extends RefAndLevel { + case class Candidate(implicitRef: ImplicitRef, kind: Candidate.Kind, level: Int) extends RefAndLevel: def ref: TermRef = implicitRef.underlyingRef def isExtension = (kind & Candidate.Extension) != 0 def isConversion = (kind & Candidate.Conversion) != 0 - } - object Candidate { + object Candidate: type Kind = Int inline val None = 0 inline val Value = 1 inline val Conversion = 2 inline val Extension = 4 - } /** If `expected` is a selection prototype, does `tp` have an extension * method with the selecting name? False otherwise. @@ -85,7 +81,7 @@ object Implicits: /** A common base class of contextual implicits and of-type implicits which * represents a set of references to implicit definitions. */ - abstract class ImplicitRefs(initctx: Context) { + abstract class ImplicitRefs(initctx: Context): val irefCtx = if (initctx eq NoContext) initctx else initctx.retractMode(Mode.ImplicitsEnabled) protected given Context = irefCtx @@ -104,25 +100,24 @@ object Implicits: private var mySingletonClass: ClassSymbol | Null = null /** Widen type so that it is neither a singleton type nor a type that inherits from scala.Singleton. */ - private def widenSingleton(tp: Type)(using Context): Type = { + private def widenSingleton(tp: Type)(using Context): Type = if (mySingletonClass == null) mySingletonClass = defn.SingletonClass val wtp = tp.widenSingleton if (wtp.derivesFrom(mySingletonClass.uncheckedNN)) defn.AnyType else wtp - } protected def isAccessible(ref: TermRef)(using Context): Boolean /** Return those references in `refs` that are compatible with type `pt`. 
*/ - protected def filterMatching(pt: Type)(using Context): List[Candidate] = { + protected def filterMatching(pt: Type)(using Context): List[Candidate] = record("filterMatching") val considerExtension = pt match case ViewProto(_, _: SelectionProto) => true case _ => false - def candidateKind(ref: TermRef)(using Context): Candidate.Kind = { /*trace(i"candidateKind $ref $pt")*/ + def candidateKind(ref: TermRef)(using Context): Candidate.Kind = /*trace(i"candidateKind $ref $pt")*/ - def viewCandidateKind(tpw: Type, argType: Type, resType: Type): Candidate.Kind = { + def viewCandidateKind(tpw: Type, argType: Type, resType: Type): Candidate.Kind = def methodCandidateKind(mt: MethodType, approx: Boolean) = if (mt.isImplicitMethod) @@ -136,18 +131,17 @@ object Implicits: else Candidate.None - tpw match { + tpw match case mt: MethodType => methodCandidateKind(mt, approx = false) case poly: PolyType => // We do not need to call ProtoTypes#constrained on `poly` because // `candidateKind` is always called with mode TypevarsMissContext enabled. - poly.resultType match { + poly.resultType match case mt: MethodType => methodCandidateKind(mt, approx = true) case rtp => viewCandidateKind(wildApprox(rtp), argType, resType) - } case tpw: TermRef => // can't discard overloaded refs Candidate.Conversion | (if considerExtension then Candidate.Extension else Candidate.None) @@ -184,15 +178,12 @@ object Implicits: if considerExtension && hasExtMethod(tpw, resType) then Candidate.Extension else Candidate.None conversionKind | extensionKind - } - } - def valueTypeCandidateKind(tpw: Type): Candidate.Kind = tpw.stripPoly match { + def valueTypeCandidateKind(tpw: Type): Candidate.Kind = tpw.stripPoly match case tpw: MethodType => if (tpw.isImplicitMethod) Candidate.Value else Candidate.None case _ => Candidate.Value - } /** Widen singleton arguments of implicit conversions to their underlying type. * This is necessary so that they can be found eligible for the argument type. 
@@ -216,7 +207,7 @@ object Implicits: var ckind = if !isAccessible(ref) then Candidate.None - else pt match { + else pt match case pt: ViewProto => viewCandidateKind(ref.widen, pt.argType, pt.resType) case _: ValueTypeOrProto => @@ -224,11 +215,10 @@ object Implicits: else valueTypeCandidateKind(ref.widen) case _ => Candidate.Value - } if (ckind == Candidate.None) record("discarded eligible") - else { + else val ptNorm = normalize(pt, pt) // `pt` could be implicit function types, check i2749 val refAdjusted = if (pt.isInstanceOf[ViewProto]) adjustSingletonArg(ref) @@ -237,9 +227,7 @@ object Implicits: Stats.record("eligible check matches") if (!NoViewsAllowed.isCompatible(refNorm, ptNorm)) ckind = Candidate.None - } ckind - } if refs.isEmpty && (!considerExtension || companionRefs.isEmpty) then @@ -260,34 +248,29 @@ object Implicits: if refs.nonEmpty then refs.foreach(tryCandidate(extensionOnly = false)) candidates.toList - } - } /** The implicit references coming from the implicit scope of a type. * @param tp the type determining the implicit scope * @param companionRefs the companion objects in the implicit scope. 
*/ - class OfTypeImplicits(tp: Type, override val companionRefs: TermRefSet)(initctx: Context) extends ImplicitRefs(initctx) { + class OfTypeImplicits(tp: Type, override val companionRefs: TermRefSet)(initctx: Context) extends ImplicitRefs(initctx): implicits.println(i"implicit scope of type $tp = ${companionRefs.showAsList}%, %") - @threadUnsafe lazy val refs: List[ImplicitRef] = { + @threadUnsafe lazy val refs: List[ImplicitRef] = val buf = new mutable.ListBuffer[TermRef] for (companion <- companionRefs) buf ++= companion.implicitMembers buf.toList - } /** The candidates that are eligible for expected type `tp` */ @threadUnsafe lazy val eligible: List[Candidate] = - trace(i"eligible($tp), companions = ${companionRefs.showAsList}%, %", implicitsDetailed, show = true) { + trace(i"eligible($tp), companions = ${companionRefs.showAsList}%, %", implicitsDetailed, show = true): if (refs.nonEmpty && monitored) record(s"check eligible refs in tpe", refs.length) filterMatching(tp) - } override def isAccessible(ref: TermRef)(using Context): Boolean = ref.symbol.exists override def toString: String = i"OfTypeImplicits($tp), companions = ${companionRefs.showAsList}%, %; refs = $refs%, %." - } /** The implicit references coming from the context. * @param refs the implicit references made visible by the current context. @@ -299,7 +282,7 @@ object Implicits: class ContextualImplicits( val refs: List[ImplicitRef], val outerImplicits: ContextualImplicits | Null, - isImport: Boolean)(initctx: Context) extends ImplicitRefs(initctx) { + isImport: Boolean)(initctx: Context) extends ImplicitRefs(initctx): private val eligibleCache = EqHashMap[Type, List[Candidate]]() /** The level increases if current context has a different owner or scope than @@ -321,10 +304,9 @@ object Implicits: /** Is this the outermost implicits? This is the case if it either the implicits * of NoContext, or the last one before it. 
*/ - private def isOuterMost = { + private def isOuterMost = val finalImplicits = NoContext.implicits (this eq finalImplicits) || (outerImplicits eqn finalImplicits) - } private def combineEligibles(ownEligible: List[Candidate], outerEligible: List[Candidate]): List[Candidate] = if ownEligible.isEmpty then outerEligible @@ -346,59 +328,50 @@ object Implicits: Stats.record(i"compute eligible not cached ${tp.getClass}") Stats.record("compute eligible not cached") computeEligible(tp) - else { + else val eligibles = eligibleCache.lookup(tp) - if (eligibles != null) { + if (eligibles != null) Stats.record("cached eligible") eligibles - } else if (irefCtx eq NoContext) Nil - else { + else Stats.record(i"compute eligible cached") val result = computeEligible(tp) eligibleCache(tp) = result result - } - } - private def computeEligible(tp: Type): List[Candidate] = /*>|>*/ trace(i"computeEligible $tp in $refs%, %", implicitsDetailed) /*<|<*/ { + private def computeEligible(tp: Type): List[Candidate] = /*>|>*/ trace(i"computeEligible $tp in $refs%, %", implicitsDetailed): /*<|<*/ if (monitored) record(s"check eligible refs in irefCtx", refs.length) val ownEligible = filterMatching(tp) if isOuterMost then ownEligible else combineEligibles(ownEligible, outerImplicits.nn.eligible(tp)) - } override def isAccessible(ref: TermRef)(using Context): Boolean = ref.symbol.isAccessibleFrom(ref.prefix) - override def toString: String = { + override def toString: String = val own = i"(implicits: $refs%, %)" if (isOuterMost) own else own + "\n " + outerImplicits - } /** This context, or a copy, ensuring root import from symbol `root` * is not present in outer implicits. 
*/ def exclude(root: Symbol): ContextualImplicits = if (this == NoContext.implicits) this - else { + else val outerExcluded = outerImplicits.nn exclude root if (irefCtx.importInfo.nn.site.termSymbol == root) outerExcluded else if (outerExcluded eqn outerImplicits) this else new ContextualImplicits(refs, outerExcluded, isImport)(irefCtx) - } - } /** The result of an implicit search */ - sealed abstract class SearchResult extends Showable { + sealed abstract class SearchResult extends Showable: def tree: Tree def toText(printer: Printer): Text = printer.toText(this) - def recoverWith(other: SearchFailure => SearchResult): SearchResult = this match { + def recoverWith(other: SearchFailure => SearchResult): SearchResult = this match case _: SearchSuccess => this case fail: SearchFailure => other(fail) - } def isSuccess: Boolean = isInstanceOf[SearchSuccess] - } /** A successful search * @param tree The typed tree that needs to be inserted @@ -411,30 +384,27 @@ object Implicits: extends SearchResult with RefAndLevel with Showable /** A failed search */ - case class SearchFailure(tree: Tree) extends SearchResult { + case class SearchFailure(tree: Tree) extends SearchResult: final def isAmbiguous: Boolean = tree.tpe.isInstanceOf[AmbiguousImplicits | TooUnspecific] final def reason: SearchFailureType = tree.tpe.asInstanceOf[SearchFailureType] - } - object SearchFailure { - def apply(tpe: SearchFailureType, span: Span)(using Context): SearchFailure = { + object SearchFailure: + def apply(tpe: SearchFailureType, span: Span)(using Context): SearchFailure = val id = tpe match case tpe: (AmbiguousImplicits | TooUnspecific) => untpd.SearchFailureIdent(nme.AMBIGUOUS, s"/* ambiguous: ${tpe.explanation} */") case _ => untpd.SearchFailureIdent(nme.MISSING, "/* missing */") SearchFailure(id.withTypeUnchecked(tpe).withSpan(span)) - } - } - abstract class SearchFailureType extends ErrorType { + abstract class SearchFailureType extends ErrorType: def expectedType: Type def argument: 
Tree /** A "massaging" function for displayed types to give better info in error diagnostics */ def clarify(tp: Type)(using Context): Type = tp - final protected def qualify(using Context): String = expectedType match { + final protected def qualify(using Context): String = expectedType match case SelectionProto(name, mproto, _, _) if !argument.isEmpty => i"provide an extension method `$name` on ${argument.tpe}" case NoType => @@ -443,16 +413,14 @@ object Implicits: case _ => if (argument.isEmpty) i"match type ${clarify(expectedType)}" else i"convert from ${argument.tpe} to ${clarify(expectedType)}" - } /** If search was for an implicit conversion, a note describing the failure * in more detail - this is either empty or starts with a '\n' */ def whyNoConversion(using Context): String = "" - } class NoMatchingImplicits(val expectedType: Type, val argument: Tree, constraint: Constraint = OrderingConstraint.empty) - extends SearchFailureType { + extends SearchFailureType: /** Replace all type parameters in constraint by their bounds, to make it clearer * what was expected @@ -460,7 +428,7 @@ object Implicits: override def clarify(tp: Type)(using Context): Type = val ctx1 = ctx.fresh.setExploreTyperState() ctx1.typerState.constraint = constraint - inContext(ctx1) { + inContext(ctx1): val map = new TypeMap: def apply(t: Type): Type = t match case t: TypeParamRef => @@ -473,22 +441,19 @@ object Implicits: mapOver(t) override def mapArgs(args: List[Type], tparams: List[ParamInfo]) = - args.mapConserve { + args.mapConserve: case t: TypeParamRef => constraint.entry(t) match case bounds: TypeBounds => TypeComparer.fullBounds(t) case _ => this(t) case t => this(t) - } end map map(tp) - } def msg(using Context): Message = em"no implicit values were found that $qualify" override def toString = s"NoMatchingImplicits($expectedType, $argument)" - } @sharable object NoMatchingImplicits extends NoMatchingImplicits(NoType, EmptyTree, OrderingConstraint.empty) @@ -512,7 +477,7 @@ 
object Implicits: override def toString = s"TooUnspecific" /** An ambiguous implicits failure */ - class AmbiguousImplicits(val alt1: SearchSuccess, val alt2: SearchSuccess, val expectedType: Type, val argument: Tree) extends SearchFailureType { + class AmbiguousImplicits(val alt1: SearchSuccess, val alt2: SearchSuccess, val expectedType: Type, val argument: Tree) extends SearchFailureType: def msg(using Context): Message = var str1 = err.refStr(alt1.ref) var str2 = err.refStr(alt2.ref) @@ -528,21 +493,18 @@ object Implicits: i""" |Note that implicit $what cannot be applied because they are ambiguous; |$explanation""" - } class MismatchedImplicit(ref: TermRef, val expectedType: Type, - val argument: Tree) extends SearchFailureType { + val argument: Tree) extends SearchFailureType: def msg(using Context): Message = em"${err.refStr(ref)} does not $qualify" - } class DivergingImplicit(ref: TermRef, val expectedType: Type, - val argument: Tree) extends SearchFailureType { + val argument: Tree) extends SearchFailureType: def msg(using Context): Message = em"${err.refStr(ref)} produces a diverging implicit search when trying to $qualify" - } /** A search failure type for attempted ill-typed extension method calls */ class FailedExtension(extApp: Tree, val expectedType: Type, val whyFailed: Message) extends SearchFailureType: @@ -570,10 +532,9 @@ object Implicits: class MacroErrorsFailure(errors: List[Diagnostic.Error], val expectedType: Type, - val argument: Tree) extends SearchFailureType { + val argument: Tree) extends SearchFailureType: def msg(using Context): Message = em"${errors.map(_.msg).mkString("\n")}" - } end Implicits import Implicits._ @@ -680,11 +641,11 @@ trait ImplicitRunInfo: case iscope: OfTypeImplicits => companions ++= iscope.companionRefs case null => t match - case t: TypeRef => - val sym = t.symbol - val pre = t.prefix - addPath(pre) - addCompanion(pre, + case t: TypeRef => + val sym = t.symbol + val pre = t.prefix + addPath(pre) + 
addCompanion(pre, if sym.isClass then sym.companionModule else pre.member(sym.name.toTermName) .suchThat(companion => companion.is(Module) && companion.owner == sym.owner) @@ -694,13 +655,13 @@ trait ImplicitRunInfo: // `A | Unit` to `js.UndefOrOps[A]`. To keep this conversion in scope // in Scala 3, where we re-interpret `js.|` as a real union, we inject // it in the scope of `Unit`. - if t.isRef(defn.UnitClass) && ctx.settings.scalajs.value then - companions += JSDefinitions.jsdefn.UnionOpsModuleRef + if t.isRef(defn.UnitClass) && ctx.settings.scalajs.value then + companions += JSDefinitions.jsdefn.UnionOpsModuleRef - if sym.isClass then - for p <- t.parents do companions ++= iscopeRefs(p) - else - companions ++= iscopeRefs(t.underlying) + if sym.isClass then + for p <- t.parents do companions ++= iscopeRefs(p) + else + companions ++= iscopeRefs(t.underlying) end addCompanions def addPath(pre: Type): Unit = pre.dealias match @@ -827,11 +788,11 @@ trait Implicits: override def viewExists(from: Type, to: Type)(using Context): Boolean = !from.isError - && !to.isError - && !ctx.isAfterTyper - && ctx.mode.is(Mode.ImplicitsEnabled) - && from.isValueType - && ( from.isValueSubType(to) + && !to.isError + && !ctx.isAfterTyper + && ctx.mode.is(Mode.ImplicitsEnabled) + && from.isValueType + && ( from.isValueSubType(to) || inferView(dummyTreeOfType(from), to) (using ctx.fresh.addMode(Mode.ImplicitExploration).setExploreTyperState()).isSuccess // TODO: investigate why we can't TyperState#test here @@ -840,45 +801,39 @@ trait Implicits: /** Find an implicit conversion to apply to given tree `from` so that the * result is compatible with type `to`. 
*/ - def inferView(from: Tree, to: Type)(using Context): SearchResult = { + def inferView(from: Tree, to: Type)(using Context): SearchResult = record("inferView") if !ctx.mode.is(Mode.ImplicitsEnabled) || from.isInstanceOf[Super] then NoMatchingImplicitsFailure - else { - def adjust(to: Type) = to.stripTypeVar.widenExpr match { + else + def adjust(to: Type) = to.stripTypeVar.widenExpr match case SelectionProto(name, memberProto, compat, true) => SelectionProto(name, memberProto, compat, privateOK = false) case tp => tp - } def isOldStyleFunctionConversion(tpe: Type): Boolean = - tpe match { + tpe match case PolyType(_, resType) => isOldStyleFunctionConversion(resType) case _ => tpe.derivesFrom(defn.FunctionSymbol(1)) && !tpe.derivesFrom(defn.ConversionClass) && !tpe.derivesFrom(defn.SubTypeClass) - } try val inferred = inferImplicit(adjust(to), from, from.span) - inferred match { + inferred match case SearchSuccess(_, ref, _, false) if isOldStyleFunctionConversion(ref.underlying) => report.migrationWarning( em"The conversion ${ref} will not be applied implicitly here in Scala 3 because only implicit methods and instances of Conversion class will continue to work as implicit views.", from ) case _ => - } inferred - catch { + catch case ex: AssertionError => implicits.println(s"view $from ==> $to") implicits.println(ctx.typerState.constraint.show) implicits.println(TypeComparer.explained(_.isSubType(from.tpe, to))) throw ex - } - } - } private var synthesizer: Synthesizer | Null = null @@ -900,12 +855,11 @@ trait Implicits: /** Search an implicit argument and report error if not found */ - def implicitArgTree(formal: Type, span: Span)(using Context): Tree = { + def implicitArgTree(formal: Type, span: Span)(using Context): Tree = val arg = inferImplicitArg(formal, span) if (arg.tpe.isInstanceOf[SearchFailureType]) report.error(missingArgMsg(arg, formal, ""), ctx.source.atSpan(span)) arg - } /** @param arg Tree representing a failed result of implicit search * @param 
pt Type for which an implicit value was searched @@ -917,7 +871,7 @@ trait Implicits: pt: Type, where: String, paramSymWithMethodCallTree: Option[(Symbol, Tree)] = None - )(using Context): Message = { + )(using Context): Message = def findHiddenImplicitsCtx(c: Context): Context = if c == NoContext then c else c.freshOver(findHiddenImplicitsCtx(c.outer)).addMode(Mode.FindHiddenImplicits) @@ -926,14 +880,12 @@ trait Implicits: case fail: SearchFailureType => if (fail.expectedType eq pt) || isFullyDefined(fail.expectedType, ForceDegree.none) then inferImplicit(fail.expectedType, fail.argument, arg.span)( - using findHiddenImplicitsCtx(ctx)) match { + using findHiddenImplicitsCtx(ctx)) match case s: SearchSuccess => Some(s) case f: SearchFailure => - f.reason match { + f.reason match case ambi: AmbiguousImplicits => Some(ambi.alt1) case r => None - } - } else // It's unsafe to search for parts of the expected type if they are not fully defined, // since these come with nested contexts that are lost at this point. 
See #7249 for an @@ -945,11 +897,10 @@ trait Implicits: else currImplicits.refs ::: allImplicits(currImplicits.outerImplicits) /** Whether the given type is for an implicit def that's a Scala 2 implicit conversion */ - def isImplicitDefConversion(typ: Type): Boolean = typ match { + def isImplicitDefConversion(typ: Type): Boolean = typ match case PolyType(_, resType) => isImplicitDefConversion(resType) case mt: MethodType => !mt.isImplicitMethod && !mt.isContextualMethod case _ => false - } def ignoredConvertibleImplicits = arg.tpe match case fail: SearchFailureType => @@ -968,11 +919,10 @@ trait Implicits: Nil MissingImplicitArgument(arg, pt, where, paramSymWithMethodCallTree, ignoredInstanceNormalImport, ignoredConvertibleImplicits) - } /** A string indicating the formal parameter corresponding to a missing argument */ def implicitParamString(paramName: TermName, methodStr: String, tree: Tree)(using Context): String = - tree match { + tree match case Select(qual, nme.apply) if defn.isFunctionType(qual.tpe.widen) => val qt = qual.tpe.widen val qt1 = qt.dealiasKeepAnnots @@ -981,23 +931,21 @@ trait Implicits: case _ => i"${ if paramName.is(EvidenceParamName) then "an implicit parameter" else s"parameter $paramName" } of $methodStr" - } /** A CanEqual[T, U] instance is assumed * - if one of T, U is an error type, or * - if one of T, U is a subtype of the lifted version of the other, * unless strict equality is set. */ - def assumedCanEqual(ltp: Type, rtp: Type)(using Context) = { + def assumedCanEqual(ltp: Type, rtp: Type)(using Context) = // Map all non-opaque abstract types to their upper bound. // This is done to check whether such types might plausibly be comparable to each other. 
- val lift = new TypeMap { - def apply(t: Type): Type = t match { + val lift = new TypeMap: + def apply(t: Type): Type = t match case t: TypeRef => - t.info match { + t.info match case TypeBounds(lo, hi) if lo.ne(hi) && !t.symbol.is(Opaque) => apply(hi) case _ => t - } case t: SingletonType => apply(t.widen) case t: RefinedType => @@ -1006,24 +954,20 @@ trait Implicits: t case _ => if (variance > 0) mapOver(t) else t - } - } ltp.isError || rtp.isError || !strictEquality && (ltp <:< lift(rtp) || rtp <:< lift(ltp)) - } /** Check that equality tests between types `ltp` and `rtp` make sense */ def checkCanEqual(ltp: Type, rtp: Type, span: Span)(using Context): Unit = - if (!ctx.isAfterTyper && !assumedCanEqual(ltp, rtp)) { + if (!ctx.isAfterTyper && !assumedCanEqual(ltp, rtp)) val res = implicitArgTree(defn.CanEqualClass.typeRef.appliedTo(ltp, rtp), span) implicits.println(i"CanEqual witness found for $ltp / $rtp: $res: ${res.tpe}") - } object hasSkolem extends TreeAccumulator[Boolean]: def apply(x: Boolean, tree: Tree)(using Context): Boolean = - x || { + x `||`: tree match case tree: Ident => tree.symbol.isSkolem case Select(qual, _) => apply(x, qual) @@ -1031,7 +975,6 @@ trait Implicits: case TypeApply(fn, _) => apply(x, fn) case _: This => false case _ => foldOver(x, tree) - } /** Find an implicit parameter or conversion. * @param pt The expected type of the parameter or conversion. @@ -1040,7 +983,7 @@ trait Implicits: * @param span The position where errors should be reported. 
*/ def inferImplicit(pt: Type, argument: Tree, span: Span)(using Context): SearchResult = - trace(s"search implicit ${pt.show}, arg = ${argument.show}: ${argument.tpe.show}", implicits, show = true) { + trace(s"search implicit ${pt.show}, arg = ${argument.show}: ${argument.tpe.show}", implicits, show = true): record("inferImplicit") assert(ctx.phase.allowsImplicitSearch, if (argument.isEmpty) i"missing implicit parameter of type $pt after typer at phase ${ctx.phase.phaseName}" @@ -1069,7 +1012,7 @@ trait Implicits: else NoMatchingImplicitsFailure val result = - result0 match { + result0 match case result: SearchSuccess => if result.tstate ne ctx.typerState then result.tstate.commit() @@ -1084,7 +1027,7 @@ trait Implicits: val deepPt = pt.deepenProto if (deepPt ne pt) inferImplicit(deepPt, argument, span) else if (migrateTo3 && !ctx.mode.is(Mode.OldOverloadingResolution)) - withMode(Mode.OldOverloadingResolution)(inferImplicit(pt, argument, span)) match { + withMode(Mode.OldOverloadingResolution)(inferImplicit(pt, argument, span)) match case altResult: SearchSuccess => report.migrationWarning( result.reason.msg @@ -1093,19 +1036,16 @@ trait Implicits: altResult case _ => result - } else result case NoMatchingImplicitsFailure if usableForInference => SearchFailure(new NoMatchingImplicits(pt, argument, ctx.typerState.constraint), span) case _ => result0 - } // If we are at the outermost implicit search then emit the implicit dictionary, if any. 
ctx.searchHistory.emitDictionary(span, result) - } /** Try to typecheck an implicit reference */ - def typedImplicit(cand: Candidate, pt: Type, argument: Tree, span: Span)(using Context): SearchResult = trace(i"typed implicit ${cand.ref}, pt = $pt, implicitsEnabled == ${ctx.mode is ImplicitsEnabled}", implicits, show = true) { + def typedImplicit(cand: Candidate, pt: Type, argument: Tree, span: Span)(using Context): SearchResult = trace(i"typed implicit ${cand.ref}, pt = $pt, implicitsEnabled == ${ctx.mode is ImplicitsEnabled}", implicits, show = true): if ctx.run.nn.isCancelled then NoMatchingImplicitsFailure else record("typedImplicit") @@ -1120,13 +1060,13 @@ trait Implicits: else // otherwise we can skip typing and go directly to adapt adapt(generated, pt.widenExpr, locked) - else { + else def untpdGenerated = untpd.TypedSplice(generated) def producesConversion(info: Type): Boolean = info match case info: PolyType => producesConversion(info.resType) case info: MethodType if info.isImplicitMethod => producesConversion(info.resType) case _ => info.derivesFrom(defn.ConversionClass) - def tryConversion(using Context) = { + def tryConversion(using Context) = val untpdConv = if ref.symbol.is(Given) && producesConversion(ref.symbol.info) then untpd.Select( @@ -1139,7 +1079,6 @@ trait Implicits: typed( untpd.Apply(untpdConv, untpd.TypedSplice(argument) :: Nil), pt, locked) - } pt match case selProto @ SelectionProto(selName: TermName, mbrType, _, _) => @@ -1170,11 +1109,10 @@ trait Implicits: else tryConversionForSelection case _ => tryConversion - } if ctx.reporter.hasErrors || !cand.ref.symbol.isAccessibleFrom(cand.ref.prefix) then - val res = adapted.tpe match { + val res = adapted.tpe match case _: SearchFailureType => SearchFailure(adapted) case error: PreviousErrorType if !adapted.symbol.isAccessibleFrom(cand.ref.prefix) => SearchFailure(adapted.withType(new NestedFailure(error.msg, pt))) @@ -1187,12 +1125,10 @@ trait Implicits: SearchFailure(adapted.withType(new 
MacroErrorsFailure(ctx.reporter.allErrors.reverse, pt, argument))) else SearchFailure(adapted.withType(new MismatchedImplicit(ref, pt, argument))) - } ctx.reporter.removeBufferedMessages res else SearchSuccess(adapted, ref, cand.level, cand.isExtension)(ctx.typerState, ctx.gadt) - } /** An implicit search; parameters as in `inferImplicit` */ class ImplicitSearch(protected val pt: Type, protected val argument: Tree, span: Span)(using Context): @@ -1305,9 +1241,8 @@ trait Implicits: else ctx.typerState - diff = inContext(ctx.withTyperState(comparisonState)) { + diff = inContext(ctx.withTyperState(comparisonState)): compare(ref1, ref2) - } case _ => if diff < 0 then alt2 else if diff > 0 then alt1 @@ -1333,7 +1268,7 @@ trait Implicits: * - otherwise add the failure to `rfailures` and continue testing the other candidates. */ def rank(pending: List[Candidate], found: SearchResult, rfailures: List[SearchFailure]): SearchResult = - pending match { + pending match case cand :: remaining => /** To recover from an ambiguous implicit failure, we need to find a pending * candidate that is strictly better than the failed candidate(s). 
@@ -1343,7 +1278,7 @@ trait Implicits: val newPending = remaining.filter(betterThanFailed) rank(newPending, fail, Nil).recoverWith(_ => fail) - negateIfNot(tryImplicit(cand, contextual)) match { + negateIfNot(tryImplicit(cand, contextual)) match case fail: SearchFailure => if fail eq ImplicitSearchTooLargeFailure then fail @@ -1362,7 +1297,7 @@ trait Implicits: case best: SearchSuccess => if (ctx.mode.is(Mode.ImplicitExploration) || isCoherent) best - else disambiguate(found, best) match { + else disambiguate(found, best) match case retained: SearchSuccess => val newPending = if (retained eq found) || remaining.isEmpty then remaining @@ -1376,16 +1311,13 @@ trait Implicits: healAmbiguous(fail, newCand => compareAlternatives(newCand, ambi.alt1) > 0 && compareAlternatives(newCand, ambi.alt2) > 0) - } - } case nil => if (rfailures.isEmpty) found else found.recoverWith(_ => rfailures.reverse.maxBy(_.tree.treeSize)) - } def negateIfNot(result: SearchResult) = if (isNotGiven) - result match { + result match case _: SearchFailure => SearchSuccess(ref(defn.NotGiven_value), defn.NotGiven_value.termRef, 0)( ctx.typerState.fresh().setCommittable(true), @@ -1393,7 +1325,6 @@ trait Implicits: ) case _: SearchSuccess => NoMatchingImplicitsFailure - } else result def warnAmbiguousNegation(ambi: AmbiguousImplicits) = @@ -1536,9 +1467,8 @@ trait Implicits: val eligible = if contextual then if ctx.gadt.isNarrowing then - withoutMode(Mode.ImplicitsEnabled) { + withoutMode(Mode.ImplicitsEnabled): ctx.implicits.uncachedEligible(wildProto) - } else ctx.implicits.eligible(wildProto) else implicitScope(wildProto).eligible searchImplicit(eligible, contextual) match @@ -1549,14 +1479,13 @@ trait Implicits: case _: AmbiguousImplicits => failure case reason => if contextual then - searchImplicit(contextual = false).recoverWith { + searchImplicit(contextual = false).recoverWith: failure2 => failure2.reason match case _: AmbiguousImplicits => failure2 case _ => reason match case (_: 
DivergingImplicit) => failure case _ => List(failure, failure2).maxBy(_.tree.treeSize) - } else failure end searchImplicit @@ -1580,13 +1509,11 @@ trait Implicits: def implicitScope(tp: Type): OfTypeImplicits = ctx.run.nn.implicitScope(tp) /** All available implicits, without ranking */ - def allImplicits: Set[SearchSuccess] = { + def allImplicits: Set[SearchSuccess] = val contextuals = ctx.implicits.eligible(wildProto).map(tryImplicit(_, contextual = true)) val inscope = implicitScope(wildProto).eligible.map(tryImplicit(_, contextual = false)) - (contextuals.toSet ++ inscope).collect { + (contextuals.toSet ++ inscope).collect: case success: SearchSuccess => success - } - } /** Fields needed for divergence checking */ @threadUnsafe lazy val ptCoveringSet = wideProto.coveringSet @@ -1762,14 +1689,13 @@ final class SearchRoot extends SearchHistory: * @result The TermRef of the corresponding dictionary entry. */ override def linkBynameImplicit(tpe: Type)(using Context): TermRef = - implicitDictionary.get(tpe) match { + implicitDictionary.get(tpe) match case Some((ref, _)) => ref case None => val lazyImplicit = newLazyImplicit(tpe) val ref = lazyImplicit.termRef implicitDictionary.put(tpe, (ref, tpd.EmptyTree)) ref - } /** * Look up an implicit dictionary entry by type. @@ -1799,12 +1725,11 @@ final class SearchRoot extends SearchHistory: * is an under-construction by name implicit, the provided result otherwise. */ override def defineBynameImplicit(tpe: Type, result: SearchSuccess)(using Context): SearchResult = - implicitDictionary.get(tpe) match { + implicitDictionary.get(tpe) match case Some((ref, _)) => implicitDictionary.put(tpe, (ref, result.tree)) SearchSuccess(tpd.ref(ref).withSpan(result.tree.span), result.ref, result.level)(result.tstate, result.gstate) case None => result - } /** * Emit the implicit dictionary at the completion of an implicit search. 
@@ -1817,7 +1742,7 @@ final class SearchRoot extends SearchHistory: override def emitDictionary(span: Span, result: SearchResult)(using Context): SearchResult = if (myImplicitDictionary == null || implicitDictionary.isEmpty) result else - result match { + result match case failure: SearchFailure => failure case success: SearchSuccess => import tpd._ @@ -1828,25 +1753,23 @@ final class SearchRoot extends SearchHistory: // eliminating entries until all remaining entries are at least transtively referred // to in the outermost result term. @tailrec - def prune(trees: List[Tree], pending: List[(TermRef, Tree)], acc: List[(TermRef, Tree)]): List[(TermRef, Tree)] = pending match { + def prune(trees: List[Tree], pending: List[(TermRef, Tree)], acc: List[(TermRef, Tree)]): List[(TermRef, Tree)] = pending match case Nil => acc case ps => - val (in, out) = ps.partition { + val (in, out) = ps.partition: case (vref, rhs) => trees.exists(_.existsSubTree { case id: Ident => id.symbol == vref.symbol case _ => false }) - } if (in.isEmpty) acc else prune(in.map(_._2) ++ trees, out, in ++ acc) - } val pruned = prune(List(success.tree), implicitDictionary.map(_._2).toList, Nil) myImplicitDictionary = null if (pruned.isEmpty) result else if (pruned.exists(_._2 == EmptyTree)) NoMatchingImplicitsFailure - else { + else // If there are any dictionary entries remaining after pruning, construct a dictionary // class of the form, // @@ -1882,9 +1805,8 @@ final class SearchRoot extends SearchHistory: }) val nrhss = rhss.map(rhsMap(_)) - val vdefs = (nsyms zip nrhss) map { + val vdefs = (nsyms zip nrhss) map: case (nsym, nrhs) => ValDef(nsym.asTerm, nrhs.changeNonLocalOwners(nsym)) - } val constr = newConstructor(classSym, Synthetic, Nil, Nil).entered val classDef = ClassDef(classSym, DefDef(constr), vdefs) @@ -1904,8 +1826,6 @@ final class SearchRoot extends SearchHistory: val blk = Block(classDef :: inst :: Nil, res).withSpan(span) success.copy(tree = blk)(success.tstate, success.gstate) - 
} - } end SearchRoot /** A set of term references where equality is =:= */ @@ -1939,11 +1859,10 @@ sealed class TermRefSet(using Context): elems.forEach(handle) // used only for debugging - def showAsList: List[TermRef] = { + def showAsList: List[TermRef] = val buffer = new mutable.ListBuffer[TermRef] foreach(tr => buffer += tr) buffer.toList - } override def toString = showAsList.toString diff --git a/compiler/src/dotty/tools/dotc/typer/ImportInfo.scala b/compiler/src/dotty/tools/dotc/typer/ImportInfo.scala index b5be2daf873b..e2cbbac4ef96 100644 --- a/compiler/src/dotty/tools/dotc/typer/ImportInfo.scala +++ b/compiler/src/dotty/tools/dotc/typer/ImportInfo.scala @@ -12,7 +12,7 @@ import StdNames.nme import printing.Texts.Text import NameKinds.QualifiedName -object ImportInfo { +object ImportInfo: case class RootRef(refFn: () => TermRef, isPredef: Boolean = false) @@ -39,7 +39,6 @@ object ImportInfo { def withRootImports: Context = given Context = c c.withRootImports(defn.rootImportFns) -} /** Info relating to an import clause * @param symf A function that computes the import symbol defined by the clause @@ -52,27 +51,23 @@ object ImportInfo { class ImportInfo(symf: Context ?=> Symbol, val selectors: List[untpd.ImportSelector], val qualifier: untpd.Tree, - val isRootImport: Boolean = false) extends Showable { + val isRootImport: Boolean = false) extends Showable: - private def symNameOpt = qualifier match { + private def symNameOpt = qualifier match case ref: untpd.RefTree => Some(ref.name.asTermName) case _ => None - } - def importSym(using Context): Symbol = { - if (mySym == null) { + def importSym(using Context): Symbol = + if (mySym == null) mySym = symf assert(mySym != null) - } mySym.uncheckedNN - } private var mySym: Symbol | Null = _ /** The (TermRef) type of the qualifier of the import clause */ - def site(using Context): Type = importSym.info match { + def site(using Context): Type = importSym.info match case ImportType(expr) => expr.tpe case _ => NoType 
- } /** The names that are excluded from any wildcard import */ def excluded: Set[TermName] = { ensureInitialized(); myExcluded.nn } @@ -225,4 +220,3 @@ class ImportInfo(symf: Context ?=> Symbol, featureCache(feature).nn def toText(printer: Printer): Text = printer.toText(this) -} diff --git a/compiler/src/dotty/tools/dotc/typer/ImportSuggestions.scala b/compiler/src/dotty/tools/dotc/typer/ImportSuggestions.scala index a9b53f0783bd..fc41a2c07e01 100644 --- a/compiler/src/dotty/tools/dotc/typer/ImportSuggestions.scala +++ b/compiler/src/dotty/tools/dotc/typer/ImportSuggestions.scala @@ -59,14 +59,13 @@ trait ImportSuggestions: val seen = mutable.Set[TermRef]() def lookInside(root: Symbol)(using Context): Boolean = - explore { + explore: if root.is(Package) then root.isTerm && root.isCompleted else !root.name.is(FlatName) && !root.name.lastPart.contains('$') && root.is(ModuleVal, butNot = JavaDefined) // The implicits in `scalajs.js.|` are implementation details and shouldn't be suggested && !(root.name == nme.raw.BAR && ctx.settings.scalajs.value && root == JSDefinitions.jsdefn.PseudoUnionModule) - } def nestedRoots(site: Type)(using Context): List[Symbol] = val seenNames = mutable.Set[Name]() @@ -157,13 +156,12 @@ trait ImportSuggestions: // Candidates that are already available without explicit import because they // are already provided by the context (imported or inherited) or because they // are in the implicit scope of `pt`. 
- val alreadyAvailableCandidates: Set[Symbol] = { + val alreadyAvailableCandidates: Set[Symbol] = val wildProto = wildApprox(pt) val contextualCandidates = ctx.implicits.eligible(wildProto) val implicitScopeCandidates = ctx.run.nn.implicitScope(wildProto).eligible val allCandidates = contextualCandidates ++ implicitScopeCandidates allCandidates.map(_.implicitRef.underlyingRef.symbol).toSet - } def testContext(): Context = ctx.fresh.retractMode(Mode.ImplicitsEnabled).setExploreTyperState() @@ -173,7 +171,7 @@ trait ImportSuggestions: */ def shallowTest(ref: TermRef): Boolean = System.currentTimeMillis < deadLine - && inContext(testContext()) { + && inContext(testContext()): def test(pt: Type): Boolean = pt match case ViewProto(argType, OrType(rt1, rt2)) => // Union types do not constrain results, since comparison with a union @@ -185,7 +183,6 @@ trait ImportSuggestions: case _ => normalize(ref, pt) <:< pt test(pt) - } /** Test whether a full given term can be synthesized that matches * the expected type `pt`. 
@@ -201,9 +198,9 @@ trait ImportSuggestions: val (expectedType, argument, kind) = pt match case ViewProto(argType, resType) => (resType, - untpd.Ident(ref.name).withSpan(span).withType(argType), - if hasExtMethod(ref, resType) then Candidate.Extension - else Candidate.Conversion) + untpd.Ident(ref.name).withSpan(span).withType(argType), + if hasExtMethod(ref, resType) then Candidate.Extension + else Candidate.Conversion) case _ => (pt, EmptyTree, Candidate.Value) val candidate = Candidate(ref, kind, 0) diff --git a/compiler/src/dotty/tools/dotc/typer/Inferencing.scala b/compiler/src/dotty/tools/dotc/typer/Inferencing.scala index 0e1c41ceef74..b21bd6c16bb4 100644 --- a/compiler/src/dotty/tools/dotc/typer/Inferencing.scala +++ b/compiler/src/dotty/tools/dotc/typer/Inferencing.scala @@ -16,7 +16,7 @@ import reporting._ import collection.mutable import scala.annotation.internal.sharable -object Inferencing { +object Inferencing: import tpd._ @@ -66,9 +66,9 @@ object Inferencing { /** Instantiate any type variables in `tp` whose bounds contain a reference to * one of the parameters in `paramss`. */ - def instantiateDependent(tp: Type, paramss: List[List[Symbol]])(using Context): Unit = { - val dependentVars = new TypeAccumulator[Set[TypeVar]] { - def apply(tvars: Set[TypeVar], tp: Type) = tp match { + def instantiateDependent(tp: Type, paramss: List[List[Symbol]])(using Context): Unit = + val dependentVars = new TypeAccumulator[Set[TypeVar]]: + def apply(tvars: Set[TypeVar], tp: Type) = tp match case tp: TypeVar if !tp.isInstantiated && TypeComparer.bounds(tp.origin) @@ -77,11 +77,8 @@ object Inferencing { tvars + tp case _ => foldOver(tvars, tp) - } - } val depVars = dependentVars(Set(), tp) if (depVars.nonEmpty) instantiateSelected(tp, depVars.toList) - } /** If `tp` is top-level type variable with a lower bound in the current constraint, * instantiate it from below. 
We also look for TypeVars in other places where @@ -156,50 +153,47 @@ object Inferencing { * to their upper bound. */ private class IsFullyDefinedAccumulator(force: ForceDegree.Value, minimizeSelected: Boolean = false) - (using Context) extends TypeAccumulator[Boolean] { + (using Context) extends TypeAccumulator[Boolean]: - private def instantiate(tvar: TypeVar, fromBelow: Boolean): Type = { + private def instantiate(tvar: TypeVar, fromBelow: Boolean): Type = val inst = tvar.instantiate(fromBelow) typr.println(i"forced instantiation of ${tvar.origin} = $inst") inst - } private var toMaximize: List[TypeVar] = Nil - def apply(x: Boolean, tp: Type): Boolean = trace(i"isFullyDefined($tp, $force)", typr) { - try { - val tpd = tp.dealias - if tpd ne tp then apply(x, tpd) - else tp match - case _: WildcardType | _: ProtoType => - false - case tvar: TypeVar if !tvar.isInstantiated => - force.appliesTo(tvar) - && ctx.typerState.constraint.contains(tvar) - && { - var fail = false - val direction = instDirection(tvar.origin) - if minimizeSelected then - if direction <= 0 && tvar.hasLowerBound then - instantiate(tvar, fromBelow = true) - else if direction >= 0 && tvar.hasUpperBound then - instantiate(tvar, fromBelow = false) + def apply(x: Boolean, tp: Type): Boolean = trace(i"isFullyDefined($tp, $force)", typr): + try + val tpd = tp.dealias + if tpd ne tp then apply(x, tpd) + else tp match + case _: WildcardType | _: ProtoType => + false + case tvar: TypeVar if !tvar.isInstantiated => + force.appliesTo(tvar) + && ctx.typerState.constraint.contains(tvar) + && { + var fail = false + val direction = instDirection(tvar.origin) + if minimizeSelected then + if direction <= 0 && tvar.hasLowerBound then + instantiate(tvar, fromBelow = true) + else if direction >= 0 && tvar.hasUpperBound then + instantiate(tvar, fromBelow = false) // else hold off instantiating unbounded unconstrained variable - else if direction != 0 then - instantiate(tvar, fromBelow = direction < 0) - else if 
variance >= 0 && (force.ifBottom == IfBottom.ok && !tvar.hasUpperBound || tvar.hasLowerBound) then - instantiate(tvar, fromBelow = true) - else if variance >= 0 && force.ifBottom == IfBottom.fail then - fail = true - else - toMaximize = tvar :: toMaximize - !fail && foldOver(x, tvar) - } - case tp => foldOver(x, tp) - } + else if direction != 0 then + instantiate(tvar, fromBelow = direction < 0) + else if variance >= 0 && (force.ifBottom == IfBottom.ok && !tvar.hasUpperBound || tvar.hasLowerBound) then + instantiate(tvar, fromBelow = true) + else if variance >= 0 && force.ifBottom == IfBottom.fail then + fail = true + else + toMaximize = tvar :: toMaximize + !fail && foldOver(x, tvar) + } + case tp => foldOver(x, tp) catch case ex: Throwable => handleRecursive("check fully defined", tp.show, ex) - } def process(tp: Type): Boolean = // Maximize type vars in the order they were visited before */ @@ -217,17 +211,15 @@ object Inferencing { process(tp) // might have type uninstantiated variables themselves. } ) - } - def approximateGADT(tp: Type)(using Context): Type = { + def approximateGADT(tp: Type)(using Context): Type = val map = new ApproximateGadtAccumulator val res = map(tp) assert(!map.failed) res - } /** Approximates a type to get rid of as many GADT-constrained abstract types as possible. */ - private class ApproximateGadtAccumulator(using Context) extends TypeMap { + private class ApproximateGadtAccumulator(using Context) extends TypeMap: var failed = false @@ -260,7 +252,7 @@ object Inferencing { * member selection (note that given/extension lookup doesn't need GADT * approx, see gadt-approximation-interaction.scala). 
*/ - def apply(tp: Type): Type = tp.dealias match { + def apply(tp: Type): Type = tp.dealias match case tp @ TypeRef(qual, nme) if variance != 0 && ctx.gadt.contains(tp.symbol) => @@ -275,39 +267,31 @@ object Inferencing { case tp => mapOver(tp) - } - def process(tp: Type): Type = { + def process(tp: Type): Type = apply(tp) - } - } /** For all type parameters occurring in `tp`: * If the bounds of `tp` in the current constraint are equal wrt =:=, * instantiate the type parameter to the lower bound's approximation * (approximation because of possible F-bounds). */ - def replaceSingletons(tp: Type)(using Context): Unit = { - val tr = new TypeTraverser { - def traverse(tp: Type): Unit = { - tp match { + def replaceSingletons(tp: Type)(using Context): Unit = + val tr = new TypeTraverser: + def traverse(tp: Type): Unit = + tp match case param: TypeParamRef => val constraint = accCtx.typerState.constraint - constraint.entry(param) match { + constraint.entry(param) match case TypeBounds(lo, hi) if (hi frozen_<:< lo) => val inst = TypeComparer.approximation(param, fromBelow = true) typr.println(i"replace singleton $param := $inst") accCtx.typerState.constraint = constraint.replace(param, inst) case _ => - } case _ => - } traverseChildren(tp) - } - } tr.traverse(tp) - } /** If `tree` has a type lambda type, infer its type parameters by comparing with expected type `pt` */ def inferTypeParams(tree: Tree, pt: Type)(using Context): Tree = tree.tpe match @@ -333,39 +317,34 @@ object Inferencing { * - The prefix `p` of a selection `p.f`. * - The result expression `e` of a block `{s1; .. sn; e}`. 
*/ - def tvarsInParams(tree: Tree, locked: TypeVars)(using Context): List[TypeVar] = { - @tailrec def boundVars(tree: Tree, acc: List[TypeVar]): List[TypeVar] = tree match { + def tvarsInParams(tree: Tree, locked: TypeVars)(using Context): List[TypeVar] = + @tailrec def boundVars(tree: Tree, acc: List[TypeVar]): List[TypeVar] = tree match case Apply(fn, _) => boundVars(fn, acc) case TypeApply(fn, targs) => - val tvars = targs.filter(_.isInstanceOf[InferredTypeTree]).tpes.collect { + val tvars = targs.filter(_.isInstanceOf[InferredTypeTree]).tpes.collect: case tvar: TypeVar if !tvar.isInstantiated && ctx.typerState.ownedVars.contains(tvar) && !locked.contains(tvar) => tvar - } boundVars(fn, acc ::: tvars) case Select(pre, _) => boundVars(pre, acc) case Block(_, expr) => boundVars(expr, acc) case _ => acc - } @tailrec def occurring(tree: Tree, toTest: List[TypeVar], acc: List[TypeVar]): List[TypeVar] = if (toTest.isEmpty) acc - else tree match { + else tree match case Apply(fn, _) => - fn.tpe.widen match { + fn.tpe.widen match case mtp: MethodType => val (occ, nocc) = toTest.partition(tvar => mtp.paramInfos.exists(tvar.occursIn)) occurring(fn, nocc, occ ::: acc) case _ => occurring(fn, toTest, acc) - } case TypeApply(fn, targs) => occurring(fn, toTest, acc) case Select(pre, _) => occurring(pre, toTest, acc) case Block(_, expr) => occurring(expr, toTest, acc) case _ => acc - } occurring(tree, boundVars(tree, Nil), Nil) - } /** The instantiation direction for given poly param computed * from the constraint: @@ -373,7 +352,7 @@ object Inferencing { * -1 (minimize) if constraint is uniformly from below, * 0 if unconstrained, or constraint is from below and above. 
*/ - private def instDirection(param: TypeParamRef)(using Context): Int = { + private def instDirection(param: TypeParamRef)(using Context): Int = val constrained = TypeComparer.fullBounds(param) val original = param.binder.paramInfos(param.paramNum) val cmp = TypeComparer @@ -382,27 +361,25 @@ object Inferencing { val approxAbove = if (!cmp.isSubTypeWhenFrozen(original.hi, constrained.hi)) 1 else 0 approxAbove - approxBelow - } /** Following type aliases and stripping refinements and annotations, if one arrives at a * class type reference where the class has a companion module, a reference to * that companion module. Otherwise NoType */ def companionRef(tp: Type)(using Context): Type = - tp.underlyingClassRef(refinementOK = true) match { + tp.underlyingClassRef(refinementOK = true) match case tp: TypeRef => val companion = tp.classSymbol.companionModule if (companion.exists) companion.termRef.asSeenFrom(tp.prefix, companion.owner) else NoType case _ => NoType - } /** Instantiate undetermined type variables so that type `tp` is maximized. 
* @return The list of type symbols that were created * to instantiate undetermined type variables that occur non-variantly */ - def maximizeType(tp: Type, span: Span)(using Context): List[Symbol] = { + def maximizeType(tp: Type, span: Span)(using Context): List[Symbol] = Stats.record("maximizeType") val vs = variances(tp) val patternBindings = new mutable.ListBuffer[(Symbol, TypeParamRef)] @@ -415,18 +392,16 @@ object Inferencing { // Eg pos/precise-pattern-type the T in Tree[-T] doesn't occur in any GADT bound so can maximise to Tree[Type] val safeToInstantiate = v != 0 && gadtBounds.forall(!tvar.occursIn(_)) if safeToInstantiate then tvar.instantiate(fromBelow = v == -1) - else { + else val bounds = TypeComparer.fullBounds(tvar.origin) if (bounds.hi frozen_<:< bounds.lo) || bounds.hi.classSymbol.is(Final) then tvar.instantiate(fromBelow = false) - else { + else // We do not add the created symbols to GADT constraint immediately, since they may have inter-dependencies. // Instead, we simultaneously add them later on. val wildCard = newPatternBoundSymbol(UniqueName.fresh(tvar.origin.paramName), bounds, span, addToGadt = false) tvar.instantiateWith(wildCard.typeRef) patternBindings += ((wildCard, tvar.origin)) - } - } } val res = patternBindings.toList.map { (boundSym, _) => // substitute bounds of pattern bound variables to deal with possible F-bounds @@ -438,7 +413,6 @@ object Inferencing { // We add the created symbols to GADT constraint here. if (res.nonEmpty) ctx.gadtState.addToConstraint(res) res - } type VarianceMap = SimpleIdentityMap[TypeVar, Integer] @@ -465,13 +439,13 @@ object Inferencing { * * we want to instantiate U to x.type right away. No need to wait further. 
*/ - private def variances(tp: Type, pt: Type = WildcardType)(using Context): VarianceMap = { + private def variances(tp: Type, pt: Type = WildcardType)(using Context): VarianceMap = Stats.record("variances") val constraint = ctx.typerState.constraint - object accu extends TypeAccumulator[VarianceMap] { + object accu extends TypeAccumulator[VarianceMap]: def setVariance(v: Int) = variance = v - def apply(vmap: VarianceMap, t: Type): VarianceMap = t match { + def apply(vmap: VarianceMap, t: Type): VarianceMap = t match case t: TypeVar if !t.isInstantiated && accCtx.typerState.constraint.contains(t) => val v = vmap(t) @@ -480,8 +454,6 @@ object Inferencing { else vmap.updated(t, 0) case _ => foldOver(vmap, t) - } - } /** Include in `vmap` type variables occurring in the constraints of type variables * already in `vmap`. Specifically: @@ -493,7 +465,7 @@ object Inferencing { * bounds as non-variant. * Do this in a fixpoint iteration until `vmap` stabilizes. */ - def propagate(vmap: VarianceMap): VarianceMap = { + def propagate(vmap: VarianceMap): VarianceMap = var vmap1 = vmap def traverse(tp: Type) = { vmap1 = accu(vmap1, tp) } vmap.foreachBinding { (tvar, v) => @@ -510,34 +482,30 @@ object Inferencing { case _ => } if (vmap1 eq vmap) vmap else propagate(vmap1) - } propagate(accu(accu(SimpleIdentityMap.empty, tp), pt.finalResultType)) - } /** Run the transformation after dealiasing but return the original type if it was a no-op. */ - private def derivedOnDealias(tp: Type)(transform: Type => Type)(using Context) = { + private def derivedOnDealias(tp: Type)(transform: Type => Type)(using Context) = val dealiased = tp.dealias val transformed = transform(dealiased) if transformed eq dealiased then tp // return the original type, not the result of dealiasing else transformed - } /** Replace every top-level occurrence of a wildcard type argument by * a fresh skolem type. 
The skolem types are of the form $i.CAP, where * $i is a skolem of type `scala.internal.TypeBox`, and `CAP` is its * type member. See the documentation of `TypeBox` for a rationale why we do this. */ - def captureWildcards(tp: Type)(using Context): Type = derivedOnDealias(tp) { + def captureWildcards(tp: Type)(using Context): Type = derivedOnDealias(tp): case tp @ AppliedType(tycon, args) if tp.hasWildcardArg => val tparams = tycon.typeParamSymbols - val args1 = args.zipWithConserve(tparams.map(_.paramInfo.substApprox(tparams, args))) { + val args1 = args.zipWithConserve(tparams.map(_.paramInfo.substApprox(tparams, args))): case (TypeBounds(lo, hi), bounds) => val skolem = SkolemType(defn.TypeBoxClass.typeRef.appliedTo(lo | bounds.loBound, hi & bounds.hiBound)) TypeRef(skolem, defn.TypeBox_CAP) case (arg, _) => arg - } if tparams.isEmpty then tp else tp.derivedAppliedType(tycon, args1) case tp: AndOrType => tp.derivedAndOrType(captureWildcards(tp.tp1), captureWildcards(tp.tp2)) case tp: RefinedType => tp.derivedRefinedType(captureWildcards(tp.parent), tp.refinedName, tp.refinedInfo) @@ -545,12 +513,10 @@ object Inferencing { case tp: LazyRef => captureWildcards(tp.ref) case tp: AnnotatedType => tp.derivedAnnotatedType(captureWildcards(tp.parent), tp.annot) case _ => tp - } def hasCaptureConversionArg(tp: Type)(using Context): Boolean = tp match case tp: AppliedType => tp.args.exists(_.typeSymbol == defn.TypeBox_CAP) case _ => false -} trait Inferencing { this: Typer => import Inferencing._ @@ -588,7 +554,7 @@ trait Inferencing { this: Typer => val ownedVars = state.ownedVars if (ownedVars ne locked) && !ownedVars.isEmpty then val qualifying = ownedVars -- locked - if (!qualifying.isEmpty) { + if (!qualifying.isEmpty) typr.println(i"interpolate $tree: ${tree.tpe.widen} in $state, pt = $pt, owned vars = ${state.ownedVars.toList}%, %, qualifying = ${qualifying.toList}%, %, previous = ${locked.toList}%, % / ${state.constraint}") val resultAlreadyConstrained = 
tree.isInstanceOf[Apply] || tree.tpe.isInstanceOf[MethodOrPoly] @@ -738,7 +704,6 @@ trait Inferencing { this: Typer => end doInstantiate doInstantiate(filterByDeps(toInstantiate)) - } end if tree end interpolateTypeVars @@ -772,14 +737,13 @@ trait Inferencing { this: Typer => } /** An enumeration controlling the degree of forcing in "is-fully-defined" checks. */ -@sharable object ForceDegree { +@sharable object ForceDegree: class Value(val appliesTo: TypeVar => Boolean, val ifBottom: IfBottom): override def toString = s"ForceDegree.Value(.., $ifBottom)" val none: Value = new Value(_ => false, IfBottom.ok) { override def toString = "ForceDegree.none" } val all: Value = new Value(_ => true, IfBottom.ok) { override def toString = "ForceDegree.all" } val failBottom: Value = new Value(_ => true, IfBottom.fail) { override def toString = "ForceDegree.failBottom" } val flipBottom: Value = new Value(_ => true, IfBottom.flip) { override def toString = "ForceDegree.flipBottom" } -} enum IfBottom: case ok, fail, flip diff --git a/compiler/src/dotty/tools/dotc/typer/JavaChecks.scala b/compiler/src/dotty/tools/dotc/typer/JavaChecks.scala index 89caf5e1c474..29265ebc9980 100644 --- a/compiler/src/dotty/tools/dotc/typer/JavaChecks.scala +++ b/compiler/src/dotty/tools/dotc/typer/JavaChecks.scala @@ -7,9 +7,9 @@ import ast.tpd._ /** PostTyper doesn't run on java sources, * but some checks still need to be applied. */ -object JavaChecks { +object JavaChecks: /** Check the bounds of AppliedTypeTrees. */ - private object AppliedTypeChecker extends TreeTraverser { + private object AppliedTypeChecker extends TreeTraverser: def traverse(tree: Tree)(using Context): Unit = tree match case tpt: TypeTree => Checking.checkAppliedTypesIn(tpt) @@ -17,10 +17,8 @@ object JavaChecks { Checking.checkAppliedType(tree) case _ => traverseChildren(tree) - } /** Scan a tree and check it. 
*/ def check(tree: Tree)(using Context): Unit = report.debuglog("checking type bounds in " + ctx.compilationUnit.source.name) AppliedTypeChecker.traverse(tree) -} diff --git a/compiler/src/dotty/tools/dotc/typer/Namer.scala b/compiler/src/dotty/tools/dotc/typer/Namer.scala index df708057dd71..eccbf537f85f 100644 --- a/compiler/src/dotty/tools/dotc/typer/Namer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Namer.scala @@ -94,16 +94,14 @@ class Namer { typer: Typer => private var lateCompile = false /** The symbol of the given expanded tree. */ - def symbolOfTree(tree: Tree)(using Context): Symbol = { + def symbolOfTree(tree: Tree)(using Context): Symbol = val xtree = expanded(tree) - xtree.getAttachment(TypedAhead) match { + xtree.getAttachment(TypedAhead) match case Some(ttree) => ttree.symbol case none => xtree.getAttachment(SymOfTree) match case Some(sym) => sym case _ => throw IllegalArgumentException(i"$xtree does not have a symbol") - } - } def hasDefinedSymbol(tree: Tree)(using Context): Boolean = val xtree = expanded(tree) @@ -112,19 +110,17 @@ class Namer { typer: Typer => /** The enclosing class with given name; error if none exists */ def enclosingClassNamed(name: TypeName, span: Span)(using Context): Symbol = if (name.isEmpty) NoSymbol - else { + else val cls = ctx.owner.enclosingClassNamed(name) if (!cls.exists) report.error(UnknownNamedEnclosingClassOrObject(name), ctx.source.atSpan(span)) cls - } /** Record `sym` as the symbol defined by `tree` */ - def recordSym(sym: Symbol, tree: Tree)(using Context): Symbol = { + def recordSym(sym: Symbol, tree: Tree)(using Context): Symbol = for (refs <- tree.removeAttachment(References); ref <- refs) ref.watching(sym) tree.pushAttachment(SymOfTree, sym) sym - } /** Check that a new definition with given name and privacy status * in current context would not conflict with existing currently @@ -178,7 +174,7 @@ class Namer { typer: Typer => /** If this tree is a member def or an import, create a symbol of it * and 
store in symOfTree map. */ - def createSymbol(tree: Tree)(using Context): Symbol = { + def createSymbol(tree: Tree)(using Context): Symbol = def privateWithinClass(mods: Modifiers) = enclosingClassNamed(mods.privateWithin, tree.span) @@ -192,18 +188,16 @@ class Namer { typer: Typer => */ def checkFlags(flags: FlagSet) = if (flags.isEmpty) flags - else { - val (ok, adapted, kind) = tree match { + else + val (ok, adapted, kind) = tree match case tree: TypeDef => (flags.isTypeFlags, flags.toTypeFlags, "type") case _ => (flags.isTermFlags, flags.toTermFlags, "value") - } def canBeLocal = tree match case tree: MemberDef => SymDenotations.canBeLocal(tree.name, flags) case _ => false if !ok then report.error(em"modifier(s) `${flags.flagsString}` incompatible with $kind definition", tree.srcPos) if adapted.is(Private) && canBeLocal then adapted | Local else adapted - } /** Add moduleClass/sourceModule to completer if it is for a module val or class */ def adjustIfModule(completer: LazyType, tree: MemberDef) = @@ -215,7 +209,7 @@ class Namer { typer: Typer => /** Create new symbol or redefine existing symbol under lateCompile. 
*/ def createOrRefine[S <: Symbol]( tree: MemberDef, name: Name, flags: FlagSet, owner: Symbol, infoFn: S => Type, - symFn: (FlagSet, S => Type, Symbol) => S): Symbol = { + symFn: (FlagSet, S => Type, Symbol) => S): Symbol = val prev = if (lateCompile && ctx.owner.is(Package)) ctx.effectiveScope.lookup(name) else NoSymbol @@ -223,24 +217,21 @@ class Namer { typer: Typer => var flags1 = flags var privateWithin = privateWithinClass(tree.mods) val effectiveOwner = owner.skipWeakOwner - if (flags.is(Private) && effectiveOwner.is(Package)) { + if (flags.is(Private) && effectiveOwner.is(Package)) // If effective owner is a package p, widen private to private[p] flags1 = flags1 &~ PrivateLocal privateWithin = effectiveOwner - } val sym = - if (prev.exists) { + if (prev.exists) prev.flags = flags1 prev.info = infoFn(prev.asInstanceOf[S]) prev.setPrivateWithin(privateWithin) prev - } else symFn(flags1, infoFn, privateWithin) recordSym(sym, tree) - } - tree match { + tree match case tree: TypeDef if tree.isClassDef => val flags = checkFlags(tree.mods.flags) val name = checkNoConflict(tree.name, flags.is(Private), tree.span).asTypeName @@ -295,8 +286,6 @@ class Namer { typer: Typer => recordSym(newImportSymbol(ctx.owner, Completer(tree)(ctx), tree.span), tree) case _ => NoSymbol - } - } /** If `sym` exists, enter it in effective scope. Check that * package members are not entered twice in the same run. @@ -308,84 +297,70 @@ class Namer { typer: Typer => ctx.enter(sym) /** Create package if it does not yet exist. 
*/ - private def createPackageSymbol(pid: RefTree)(using Context): Symbol = { - val pkgOwner = pid match { + private def createPackageSymbol(pid: RefTree)(using Context): Symbol = + val pkgOwner = pid match case Ident(_) => if (ctx.owner eq defn.EmptyPackageClass) defn.RootClass else ctx.owner case Select(qual: RefTree, _) => createPackageSymbol(qual).moduleClass - } val existing = pkgOwner.info.decls.lookup(pid.name) - if (existing.is(Package) && (pkgOwner eq existing.owner)) { - existing.moduleClass.denot match { + if (existing.is(Package) && (pkgOwner eq existing.owner)) + existing.moduleClass.denot match case d: PackageClassDenotation => // Remove existing members coming from a previous compilation of this file, // they are obsolete. d.unlinkFromFile(ctx.source.file) case _ => - } existing - } - else { + else /** If there's already an existing type, then the package is a dup of this type */ val existingType = pkgOwner.info.decls.lookup(pid.name.toTypeName) - if (existingType.exists) { + if (existingType.exists) report.error(PkgDuplicateSymbol(existingType), pid.srcPos) newCompletePackageSymbol(pkgOwner, (pid.name ++ "$_error_").toTermName).entered - } else newCompletePackageSymbol(pkgOwner, pid.name.asTermName).entered - } - } /** Expand tree and store in `expandedTree` */ - def expand(tree: Tree)(using Context): Unit = { + def expand(tree: Tree)(using Context): Unit = def record(expanded: Tree) = - if (expanded `ne` tree) { + if (expanded `ne` tree) typr.println(i"Expansion: $tree expands to $expanded") tree.pushAttachment(ExpandedTree, expanded) - } - tree match { + tree match case tree: DefTree => record(desugar.defTree(tree)) case tree: PackageDef => record(desugar.packageDef(tree)) case tree: ExtMethods => record(desugar.extMethods(tree)) case _ => - } - } /** The expanded version of this tree, or tree itself if not expanded */ - def expanded(tree: Tree)(using Context): Tree = tree match { + def expanded(tree: Tree)(using Context): Tree = tree match case 
_: DefTree | _: PackageDef | _: ExtMethods => tree.attachmentOrElse(ExpandedTree, tree) case _ => tree - } /** For all class definitions `stat` in `xstats`: If the companion class is * not also defined in `xstats`, invalidate it by setting its info to * NoType. */ - def invalidateCompanions(pkg: Symbol, xstats: List[untpd.Tree])(using Context): Unit = { + def invalidateCompanions(pkg: Symbol, xstats: List[untpd.Tree])(using Context): Unit = val definedNames = xstats collect { case stat: NameTree => stat.name } def invalidate(name: TypeName) = - if (!(definedNames contains name)) { + if (!(definedNames contains name)) val member = pkg.info.decl(name).asSymDenotation if (member.isClass && !(member.is(Package))) member.markAbsent() - } - xstats foreach { + xstats foreach: case stat: TypeDef if stat.isClassDef => invalidate(stat.name.moduleClassName) case _ => - } - } /** Expand tree and create top-level symbols for statement and enter them into symbol table */ - def index(stat: Tree)(using Context): Context = { + def index(stat: Tree)(using Context): Context = expand(stat) indexExpanded(stat) - } /** Create top-level symbols for all statements in the expansion of this statement and * enter them into symbol table */ - def indexExpanded(origStat: Tree)(using Context): Context = { - def recur(stat: Tree): Context = stat match { + def indexExpanded(origStat: Tree)(using Context): Context = + def recur(stat: Tree): Context = stat match case pcl: PackageDef => val pkg = createPackageSymbol(pcl.pid) index(pcl.stats)(using ctx.fresh.setOwner(pkg.moduleClass)) @@ -405,9 +380,7 @@ class Namer { typer: Typer => ctx case _ => ctx - } recur(expanded(origStat)) - } /** Determines whether this field holds an enum constant. */ def isEnumConstant(vd: ValDef)(using Context): Boolean = @@ -470,72 +443,65 @@ class Namer { typer: Typer => * in reverse order of their start positions. * @pre `child` must have a position. 
*/ - final def addChild(cls: Symbol, child: Symbol)(using Context): Unit = { + final def addChild(cls: Symbol, child: Symbol)(using Context): Unit = val childStart = if (child.span.exists) child.span.start else -1 def insertInto(annots: List[Annotation]): List[Annotation] = - annots.find(_.symbol == defn.ChildAnnot) match { + annots.find(_.symbol == defn.ChildAnnot) match case Some(Annotation.Child(other)) if other.span.exists && childStart <= other.span.start => if (child == other) annots // can happen if a class has several inaccessible children - else { + else assert(childStart != other.span.start || child.source != other.source, i"duplicate child annotation $child / $other") val (prefix, otherAnnot :: rest) = annots.span(_.symbol != defn.ChildAnnot): @unchecked prefix ::: otherAnnot :: insertInto(rest) - } case _ => Annotation.Child(child, cls.span.startPos) :: annots - } cls.annotations = insertInto(cls.annotations) - } /** Add java enum constants */ - def addEnumConstants(mdef: DefTree, sym: Symbol)(using Context): Unit = mdef match { + def addEnumConstants(mdef: DefTree, sym: Symbol)(using Context): Unit = mdef match case vdef: ValDef if (isEnumConstant(vdef)) => val enumClass = sym.owner.linkedClass if (!enumClass.is(Sealed)) enumClass.setFlag(Flags.AbstractSealed) addChild(enumClass, sym) case _ => - } - def setDocstring(sym: Symbol, tree: Tree)(using Context): Unit = tree match { + def setDocstring(sym: Symbol, tree: Tree)(using Context): Unit = tree match case t: MemberDef if t.rawComment.isDefined => ctx.docCtx.foreach(_.addDocstring(sym, t.rawComment)) case t: ExtMethods => for meth <- t.methods.find(_.span.point == sym.span.point) do setDocstring(sym, meth) case _ => () - } /** Create top-level symbols for statements and enter them into symbol table * @return A context that reflects all imports in `stats`. 
*/ - def index(stats: List[Tree])(using Context): Context = { + def index(stats: List[Tree])(using Context): Context = // module name -> (stat, moduleCls | moduleVal) val moduleClsDef = mutable.Map[TypeName, (Tree, TypeDef)]() val moduleValDef = mutable.Map[TermName, (Tree, ValDef)]() /** Remove the subtree `tree` from the expanded tree of `mdef` */ - def removeInExpanded(mdef: Tree, tree: Tree): Unit = { + def removeInExpanded(mdef: Tree, tree: Tree): Unit = val Thicket(trees) = expanded(mdef): @unchecked mdef.putAttachment(ExpandedTree, Thicket(trees.filter(_ != tree))) - } /** Transfer all references to `from` to `to` */ - def transferReferences(from: ValDef, to: ValDef): Unit = { + def transferReferences(from: ValDef, to: ValDef): Unit = val fromRefs = from.removeAttachment(References).getOrElse(Nil) val toRefs = to.removeAttachment(References).getOrElse(Nil) to.putAttachment(References, fromRefs ++ toRefs) - } /** Merge the module class `modCls` in the expanded tree of `mdef` with the * body and derived clause of the synthetic module class `fromCls`. */ - def mergeModuleClass(mdef: Tree, modCls: TypeDef, fromCls: TypeDef): TypeDef = { + def mergeModuleClass(mdef: Tree, modCls: TypeDef, fromCls: TypeDef): TypeDef = var res: TypeDef | Null = null val Thicket(trees) = expanded(mdef): @unchecked val merged = trees.map { tree => - if (tree == modCls) { + if (tree == modCls) val fromTempl = fromCls.rhs.asInstanceOf[Template] val modTempl = modCls.rhs.asInstanceOf[Template] res = cpy.TypeDef(modCls)( @@ -546,21 +512,18 @@ class Namer { typer: Typer => // toString should only be generated if explicit companion is missing case _ => true } ++ modTempl.body)) - if (fromTempl.derived.nonEmpty) { + if (fromTempl.derived.nonEmpty) if (modTempl.derived.nonEmpty) report.error(em"a class and its companion cannot both have `derives` clauses", mdef.srcPos) // `res` is inside a closure, so the flow-typing doesn't work here. 
res.uncheckedNN.putAttachment(desugar.DerivingCompanion, fromTempl.srcPos.startPos) - } res.uncheckedNN - } else tree } mdef.putAttachment(ExpandedTree, Thicket(merged)) res.nn - } /** Merge `fromCls` of `fromStat` into `toCls` of `toStat` * if the former is synthetic and the latter not. @@ -570,56 +533,48 @@ class Namer { typer: Typer => * 2. `fromCls` and `toCls` are necessarily different */ def mergeIfSynthetic(fromStat: Tree, fromCls: TypeDef, toStat: Tree, toCls: TypeDef): Unit = - if (fromCls.mods.is(Synthetic) && !toCls.mods.is(Synthetic)) { + if (fromCls.mods.is(Synthetic) && !toCls.mods.is(Synthetic)) removeInExpanded(fromStat, fromCls) val mcls = mergeModuleClass(toStat, toCls, fromCls) mcls.setMods(toCls.mods) moduleClsDef(fromCls.name) = (toStat, mcls) - } /** Merge the definitions of a synthetic companion generated by a case class * and the real companion, if both exist. */ - def mergeCompanionDefs() = { + def mergeCompanionDefs() = def valid(mdef: MemberDef): Boolean = mdef.mods.is(Module, butNot = Package) for (stat <- stats) - expanded(stat) match { + expanded(stat) match case Thicket(trees) => // companion object always expands to thickets - trees.map { + trees.map: case mcls @ TypeDef(name, impl: Template) if valid(mcls) => - (moduleClsDef.get(name): @unchecked) match { + (moduleClsDef.get(name): @unchecked) match case Some((stat1, mcls1@TypeDef(_, impl1: Template))) => mergeIfSynthetic(stat, mcls, stat1, mcls1) mergeIfSynthetic(stat1, mcls1, stat, mcls) case None => moduleClsDef(name) = (stat, mcls) - } case vdef @ ValDef(name, _, _) if valid(vdef) => - moduleValDef.get(name) match { + moduleValDef.get(name) match case Some((stat1, vdef1)) => - if (vdef.mods.is(Synthetic) && !vdef1.mods.is(Synthetic)) { + if (vdef.mods.is(Synthetic) && !vdef1.mods.is(Synthetic)) transferReferences(vdef, vdef1) removeInExpanded(stat, vdef) - } - else if (!vdef.mods.is(Synthetic) && vdef1.mods.is(Synthetic)) { + else if (!vdef.mods.is(Synthetic) && 
vdef1.mods.is(Synthetic)) transferReferences(vdef1, vdef) removeInExpanded(stat1, vdef1) moduleValDef(name) = (stat, vdef) - } else { // double definition of objects or case classes, handled elsewhere } case None => moduleValDef(name) = (stat, vdef) - } case _ => - } case _ => - } - } val classDef = mutable.Map[TypeName, TypeDef]() val moduleDef = mutable.Map[TypeName, TypeDef]() @@ -627,38 +582,33 @@ class Namer { typer: Typer => /** Create links between companion object and companion class. * Populate `moduleDef` and `classDef` as a side effect. */ - def createCompanionLinks()(using Context): Unit = { + def createCompanionLinks()(using Context): Unit = def updateCache(cdef: TypeDef): Unit = if (cdef.isClassDef && !cdef.mods.is(Package)) if (cdef.mods.is(ModuleClass)) moduleDef(cdef.name) = cdef else classDef(cdef.name) = cdef - def createLinks(classTree: TypeDef, moduleTree: TypeDef)(using Context) = { + def createLinks(classTree: TypeDef, moduleTree: TypeDef)(using Context) = val claz = ctx.effectiveScope.lookup(classTree.name) val modl = ctx.effectiveScope.lookup(moduleTree.name) modl.registerCompanion(claz) claz.registerCompanion(modl) - } for (stat <- stats) - expanded(stat) match { + expanded(stat) match case cdef : TypeDef => updateCache(cdef) case Thicket(trees) => - trees.map { + trees.map: case cdef: TypeDef => updateCache(cdef) case _ => - } case _ => - } for (cdef @ TypeDef(name, _) <- classDef.values) - moduleDef.getOrElse(name.moduleClassName, EmptyTree) match { + moduleDef.getOrElse(name.moduleClassName, EmptyTree) match case t: TypeDef => createLinks(cdef, t) case EmptyTree => - } - } /** If a top-level object or class has no companion in the current run, we * enter a dummy companion (`denot.isAbsent` returns true) or constructor @@ -707,12 +657,10 @@ class Namer { typer: Typer => stats.foreach(expand) mergeCompanionDefs() val ctxWithStats = stats.foldLeft(ctx)((ctx, stat) => indexExpanded(stat)(using ctx)) - inContext(ctxWithStats) { + 
inContext(ctxWithStats): createCompanionLinks() addAbsentCompanions() - } ctxWithStats - } /** Parse the source and index symbols in the compilation unit's untpdTree * while asserting the `lateCompile` flag. This will cause any old @@ -741,16 +689,14 @@ class Namer { typer: Typer => if (unit.isJava) new JavaParser(unit.source).parse() else new Parser(unit.source).parse() - atPhase(Phases.typerPhase) { - inContext(PrepareInlineable.initContext(ctx)) { + atPhase(Phases.typerPhase): + inContext(PrepareInlineable.initContext(ctx)): // inline body annotations are set in namer, capturing the current context // we need to prepare the context for inlining. lateEnter() typeCheckCB { () => lateTypeCheck() } - } - } end lateEnterUnit /** The type bound on wildcard imports of an import list, with special values @@ -765,13 +711,12 @@ class Namer { typer: Typer => else bound } - def missingType(sym: Symbol, modifier: String)(using Context): Unit = { + def missingType(sym: Symbol, modifier: String)(using Context): Unit = report.error(em"${modifier}type of implicit definition needs to be given explicitly", sym.srcPos) sym.resetFlag(GivenOrImplicit) - } /** The completer of a symbol defined by a member def or import (except ClassSymbols) */ - class Completer(val original: Tree)(ictx: Context) extends LazyType with SymbolLoaders.SecondCompleter { + class Completer(val original: Tree)(ictx: Context) extends LazyType with SymbolLoaders.SecondCompleter: protected def localContext(owner: Symbol): FreshContext = ctx.fresh.setOwner(owner).setTree(original) @@ -797,14 +742,13 @@ class Namer { typer: Typer => typr.println(s"error while completing ${imp.expr}") throw ex - final override def complete(denot: SymDenotation)(using Context): Unit = { - if (Config.showCompletions && ctx.typerState != creationContext.typerState) { + final override def complete(denot: SymDenotation)(using Context): Unit = + if (Config.showCompletions && ctx.typerState != creationContext.typerState) def levels(c: 
Context): Int = if (c.typerState eq creationContext.typerState) 0 else if (c.outer.typerState == c.typerState) levels(c.outer) else levels(c.outer) + 1 println(s"!!!completing ${denot.symbol.showLocated} in buried typerState, gap = ${levels(ctx)}") - } val creationRunId = creationContext.runId if ctx.runId > creationRunId then assert(ctx.mode.is(Mode.Interactive), s"completing $denot in wrong run ${ctx.runId}, was created in $creationRunId") @@ -818,7 +762,6 @@ class Namer { typer: Typer => val completer = SuspendCompleter() denot.info = completer completer.complete(denot) - } private var completedTypeParamSyms: List[TypeSymbol] | Null = null @@ -829,22 +772,19 @@ class Namer { typer: Typer => if completedTypeParamSyms != null then completedTypeParamSyms.uncheckedNN else Nil - protected def addAnnotations(sym: Symbol): Unit = original match { + protected def addAnnotations(sym: Symbol): Unit = original match case original: untpd.MemberDef => lazy val annotCtx = annotContext(original, sym) - for (annotTree <- original.mods.annotations) { + for (annotTree <- original.mods.annotations) val cls = typedAheadAnnotationClass(annotTree)(using annotCtx) if (cls eq sym) report.error(em"An annotation class cannot be annotated with iself", annotTree.srcPos) - else { + else val ann = Annotation.deferred(cls)(typedAheadExpr(annotTree)(using annotCtx)) sym.addAnnotation(ann) - } - } case _ => - } - private def addInlineInfo(sym: Symbol) = original match { + private def addInlineInfo(sym: Symbol) = original match case original: untpd.DefDef if sym.isInlineMethod => def rhsToInline(using Context): tpd.Tree = if !original.symbol.exists && !hasDefinedSymbol(original) then @@ -855,7 +795,6 @@ class Namer { typer: Typer => PrepareInlineable.wrapRHS(original, mdef.tpt, mdef.rhs) PrepareInlineable.registerInlineInfo(sym, rhsToInline)(using localContext(sym)) case _ => - } /** Invalidate `denot` by overwriting its info with `NoType` if * `denot` is a compiler generated case class method 
that clashes @@ -863,10 +802,9 @@ class Namer { typer: Typer => */ private def invalidateIfClashingSynthetic(denot: SymDenotation): Unit = def isCaseClassOrCompanion(owner: Symbol) = - owner.isClass && { + owner.isClass `&&`: if (owner.is(Module)) owner.linkedClass.is(CaseClass) else owner.is(CaseClass) - } def definesMember = denot.owner.info.decls.lookupAll(denot.name).exists(alt => @@ -900,10 +838,10 @@ class Namer { typer: Typer => /** If completed symbol is an enum value or a named class, register it as a child * in all direct parent classes which are sealed. */ - def registerIfChild(denot: SymDenotation)(using Context): Unit = { + def registerIfChild(denot: SymDenotation)(using Context): Unit = val sym = denot.symbol - def register(child: Symbol, parentCls: ClassSymbol) = { + def register(child: Symbol, parentCls: ClassSymbol) = if (parentCls.is(Sealed)) if ((child.isInaccessibleChildOf(parentCls) || child.isAnonymousClass) && !sym.hasAnonymousChild) addChild(parentCls, parentCls) @@ -913,7 +851,6 @@ class Namer { typer: Typer => report.error(em"""children of $parentCls were already queried before $sym was discovered. |As a remedy, you could move $sym on the same nesting level as $parentCls.""", child.srcPos) - } if denot.isClass && !sym.isEnumAnonymClass && !sym.isRefinementClass then val child = if (denot.is(Module)) denot.sourceModule else denot.symbol @@ -922,12 +859,11 @@ class Namer { typer: Typer => assert(denot.is(Enum), denot) denot.info.classSymbols.foreach { parent => register(denot.symbol, parent) } end if - } /** Intentionally left without `implicit ctx` parameter. We need * to pick up the context at the point where the completer was created. 
*/ - def completeInCreationContext(denot: SymDenotation): Unit = { + def completeInCreationContext(denot: SymDenotation): Unit = val sym = denot.symbol addAnnotations(sym) addInlineInfo(sym) @@ -935,11 +871,9 @@ class Namer { typer: Typer => invalidateIfClashingSynthetic(denot) Checking.checkWellFormed(sym) denot.info = avoidPrivateLeaks(sym) - } - } class TypeDefCompleter(original: TypeDef)(ictx: Context) - extends Completer(original)(ictx) with TypeParamsCompleter { + extends Completer(original)(ictx) with TypeParamsCompleter: private var myTypeParams: List[TypeSymbol] | Null = null private var nestedCtx: Context | Null = null assert(!original.isClassDef) @@ -1002,18 +936,16 @@ class Namer { typer: Typer => // inspects a TypeRef's info, instead of simply dealiasing alias types. val isDerived = original.rhs.isInstanceOf[untpd.DerivedTypeTree] - val rhs = original.rhs match { + val rhs = original.rhs match case LambdaTypeTree(_, body) => body case rhs => rhs - } // For match types: approximate with upper bound while evaluating the rhs. - val dummyInfo2 = rhs match { + val dummyInfo2 = rhs match case MatchTypeTree(bound, _, _) if !bound.isEmpty => abstracted(TypeBounds.upper(typedAheadType(bound).tpe)) case _ => dummyInfo1 - } sym.info = dummyInfo2 // Treat the parameters of an upper type lambda bound on the RHS as non-variant. 
@@ -1043,10 +975,9 @@ class Namer { typer: Typer => info if (isDerived) sym.info = unsafeInfo - else { + else sym.info = NoCompleter sym.info = opaqueToBounds(checkNonCyclic(sym, unsafeInfo, reportErrors = true)) - } if sym.isOpaqueAlias then sym.typeRef.recomputeDenot() // make sure we see the new bounds from now on sym.resetFlag(Provisional) @@ -1061,9 +992,8 @@ class Namer { typer: Typer => sym.info end typeSig - } - class ClassCompleter(cls: ClassSymbol, original: TypeDef)(ictx: Context) extends Completer(original)(ictx) { + class ClassCompleter(cls: ClassSymbol, original: TypeDef)(ictx: Context) extends Completer(original)(ictx): withDecls(newScope(using ictx)) protected implicit val completerCtx: Context = localContext(cls) @@ -1075,11 +1005,10 @@ class Namer { typer: Typer => val TypeDef(name, impl @ Template(constr, _, self, _)) = original: @unchecked - private val (params, rest): (List[Tree], List[Tree]) = impl.body.span { + private val (params, rest): (List[Tree], List[Tree]) = impl.body.span: case td: TypeDef => td.mods.is(Param) case vd: ValDef => vd.mods.is(ParamAccessor) case _ => false - } def init(): Context = index(params) @@ -1107,7 +1036,7 @@ class Namer { typer: Typer => lazy val givenBound = importBound(selectors, isGiven = true) val targets = mutable.Set[Name]() - def canForward(mbr: SingleDenotation, alias: TermName): CanForward = { + def canForward(mbr: SingleDenotation, alias: TermName): CanForward = import CanForward.* val sym = mbr.symbol if !sym.isAccessibleFrom(pathType) then @@ -1133,7 +1062,6 @@ class Namer { typer: Typer => case None => Yes else Yes - } def foreachDefaultGetterOf(sym: TermSymbol, op: TermSymbol => Unit): Unit = var n = 0 @@ -1332,8 +1260,8 @@ class Namer { typer: Typer => val alt2 = tpd.methPart(forwarder1.rhs).tpe val cmp = alt1 match case alt1: TermRef => alt2 match - case alt2: TermRef => compare(alt1, alt2) - case _ => 0 + case alt2: TermRef => compare(alt1, alt2) + case _ => 0 case _ => 0 if cmp == 0 then 
report.error( @@ -1414,11 +1342,10 @@ class Namer { typer: Typer => process(stats1) case Nil => - def hasExport(stats: List[Tree]): Boolean = stats.exists { + def hasExport(stats: List[Tree]): Boolean = stats.exists: case _: Export => true case ExtMethods(_, stats1) => hasExport(stats1) case _ => false - } // Do a quick scan whether we need to process at all. This avoids creating // import contexts for nothing. if hasExport(rest) then @@ -1431,7 +1358,7 @@ class Namer { typer: Typer => * accessors, that's why the constructor needs to be completed before * the parent types are elaborated. */ - def completeConstructor(denot: SymDenotation): Unit = { + def completeConstructor(denot: SymDenotation): Unit = if (tempInfo != null) // Constructor has been completed already return @@ -1439,13 +1366,12 @@ class Namer { typer: Typer => val selfInfo: TypeOrSymbol = if (self.isEmpty) NoType - else if (cls.is(Module)) { + else if (cls.is(Module)) val moduleType = cls.owner.thisType select sourceModule if (self.name == nme.WILDCARD) moduleType else recordSym( newSymbol(cls, self.name, self.mods.flags, moduleType, coord = self.span), self) - } else createSymbol(self) val savedInfo = denot.infoOrCompleter @@ -1466,10 +1392,9 @@ class Namer { typer: Typer => tempInfo = denot.asClass.classInfo.integrateOpaqueMembers.asInstanceOf[TempClassInfo] denot.info = savedInfo - } /** The type signature of a ClassDef with given symbol */ - override def completeInCreationContext(denot: SymDenotation): Unit = { + override def completeInCreationContext(denot: SymDenotation): Unit = val parents = impl.parents /* The type of a parent constructor. 
Types constructor arguments @@ -1518,22 +1443,20 @@ class Namer { typer: Typer => * (3) The class is not final * (4) If the class is sealed, it is defined in the same compilation unit as the current class */ - def checkedParentType(parent: untpd.Tree): Type = { + def checkedParentType(parent: untpd.Tree): Type = val ptype = parentType(parent)(using completerCtx.superCallContext).dealiasKeepAnnots if (cls.isRefinementClass) ptype - else { + else val pt = checkClassType(ptype, parent.srcPos, traitReq = parent ne parents.head, stablePrefixReq = true) - if (pt.derivesFrom(cls)) { - val addendum = parent match { + if (pt.derivesFrom(cls)) + val addendum = parent match case Select(qual: Super, _) if Feature.migrateTo3 => "\n(Note that inheriting a class of the same name is no longer allowed)" case _ => "" - } report.error(CyclicInheritance(cls, addendum), parent.srcPos) defn.ObjectType - } - else { + else val pclazz = pt.typeSymbol if pclazz.is(Final) then report.error(ExtendFinalClass(cls, pclazz), cls.srcPos) @@ -1546,9 +1469,6 @@ class Namer { typer: Typer => cls.topLevelClass, parent.srcPos) pt - } - } - } /** If `parents` contains references to traits that have supertraits with implicit parameters * add those supertraits in linearization order unless they are already covered by other @@ -1598,15 +1518,13 @@ class Namer { typer: Typer => ) typr.println(i"completing $denot, parents = $parents%, %, parentTypes = $parentTypes%, %") - if (impl.derived.nonEmpty) { - val (derivingClass, derivePos) = original.removeAttachment(desugar.DerivingCompanion) match { + if (impl.derived.nonEmpty) + val (derivingClass, derivePos) = original.removeAttachment(desugar.DerivingCompanion) match case Some(pos) => (cls.companionClass.orElse(cls).asClass, pos) case None => (cls, impl.srcPos.startPos) - } val deriver = new Deriver(derivingClass, derivePos)(using localCtx) deriver.enterDerived(impl.derived) original.putAttachment(AttachedDeriver, deriver) - } denot.info = 
tempInfo.nn.finalized(parentTypes) tempInfo = null // The temporary info can now be garbage-collected @@ -1621,8 +1539,6 @@ class Namer { typer: Typer => processExports(using localCtx) defn.patchStdLibClass(cls) addConstructorProxies(cls) - } - } /** Possible actions to perform when deciding on a forwarder for a member */ private enum CanForward: @@ -1630,24 +1546,21 @@ class Namer { typer: Typer => case No(whyNot: String) case Skip // for members that have never forwarders - class SuspendCompleter extends LazyType, SymbolLoaders.SecondCompleter { + class SuspendCompleter extends LazyType, SymbolLoaders.SecondCompleter: final override def complete(denot: SymDenotation)(using Context): Unit = denot.resetFlag(Touched) // allow one more completion ctx.compilationUnit.suspend() - } /** Typecheck `tree` during completion using `typed`, and remember result in TypedAhead map */ - def typedAhead(tree: Tree, typed: untpd.Tree => tpd.Tree)(using Context): tpd.Tree = { + def typedAhead(tree: Tree, typed: untpd.Tree => tpd.Tree)(using Context): tpd.Tree = val xtree = expanded(tree) - xtree.getAttachment(TypedAhead) match { + xtree.getAttachment(TypedAhead) match case Some(ttree) => ttree case none => val ttree = typed(tree) if !ttree.isEmpty then xtree.putAttachment(TypedAhead, ttree) ttree - } - } def typedAheadType(tree: Tree, pt: Type = WildcardType)(using Context): tpd.Tree = typedAhead(tree, typer.typedType(_, pt)) @@ -1665,23 +1578,21 @@ class Namer { typer: Typer => if sym.isConstructor then sym.owner else sym /** Enter and typecheck parameter list */ - def completeParams(params: List[MemberDef])(using Context): Unit = { + def completeParams(params: List[MemberDef])(using Context): Unit = index(params) for (param <- params) typedAheadExpr(param) - } /** The signature of a module valdef. * This will compute the corresponding module class TypeRef immediately * without going through the defined type of the ValDef. 
This is necessary * to avoid cyclic references involving imports and module val defs. */ - def moduleValSig(sym: Symbol)(using Context): Type = { + def moduleValSig(sym: Symbol)(using Context): Type = val clsName = sym.name.moduleClassName val cls = ctx.effectiveScope.lookupAll(clsName) .find(_.is(ModuleClass)) .getOrElse(newStubSymbol(ctx.owner, clsName).assertingErrorsReported) ctx.owner.thisType.select(clsName, cls) - } /** The type signature of a ValDef or DefDef * @param mdef The definition @@ -1689,12 +1600,12 @@ class Namer { typer: Typer => * @param paramFn A wrapping function that produces the type of the * defined symbol, given its final return type */ - def valOrDefDefSig(mdef: ValOrDefDef, sym: Symbol, paramss: List[List[Symbol]], paramFn: Type => Type)(using Context): Type = { + def valOrDefDefSig(mdef: ValOrDefDef, sym: Symbol, paramss: List[List[Symbol]], paramFn: Type => Type)(using Context): Type = def inferredType = inferredResultType(mdef, sym, paramss, paramFn, WildcardType) lazy val termParamss = paramss.collect { case TermSymbols(vparams) => vparams } - val tptProto = mdef.tpt match { + val tptProto = mdef.tpt match case _: untpd.DerivedTypeTree => WildcardType case TypeTree() => @@ -1704,7 +1615,7 @@ class Namer { typer: Typer => if (isFullyDefined(tpe, ForceDegree.none)) tpe else typedAheadExpr(mdef.rhs, tpe).tpe case TypedSplice(tpt: TypeTree) if !isFullyDefined(tpt.tpe, ForceDegree.none) => - mdef match { + mdef match case mdef: DefDef if mdef.name == nme.ANON_FUN => // This case applies if the closure result type contains uninstantiated // type variables. 
In this case, constrain the closure result from below @@ -1733,19 +1644,16 @@ class Namer { typer: Typer => //println(i"lifting $rhsType over $termParamss -> $hygienicType = ${tpt.tpe}") //println(TypeComparer.explained { implicit ctx => hygienicType <:< tpt.tpe }) case _ => - } WildcardType case _ => WildcardType - } val mbrTpe = paramFn(checkSimpleKinded(typedAheadType(mdef.tpt, tptProto)).tpe) if (ctx.explicitNulls && mdef.mods.is(JavaDefined)) JavaNullInterop.nullifyMember(sym, mbrTpe, mdef.mods.isAllOf(JavaEnumValue)) else mbrTpe - } /** The type signature of a DefDef with given symbol */ - def defDefSig(ddef: DefDef, sym: Symbol, completer: Namer#Completer)(using Context): Type = { + def defDefSig(ddef: DefDef, sym: Symbol, completer: Namer#Completer)(using Context): Type = // Beware: ddef.name need not match sym.name if sym was freshened! val isConstructor = sym.name == nme.CONSTRUCTOR @@ -1790,7 +1698,6 @@ class Namer { typer: Typer => wrapMethType(effectiveResultType(sym, paramSymss)) else valOrDefDefSig(ddef, sym, paramSymss, wrapMethType) - } def inferredResultType( mdef: ValOrDefDef, @@ -1853,13 +1760,12 @@ class Namer { typer: Typer => sym.owner.companionClass.info.decl(nme.CONSTRUCTOR) else ctx.defContext(sym).denotNamed(original) - def paramProto(paramss: List[List[Type]], idx: Int): Type = paramss match { + def paramProto(paramss: List[List[Type]], idx: Int): Type = paramss match case params :: paramss1 => if (idx < params.length) params(idx) else paramProto(paramss1, idx - params.length) case nil => NoType - } val defaultAlts = meth.altsWith(_.hasDefaultParams) if (defaultAlts.length == 1) paramProto(defaultAlts.head.info.widen.paramInfoss, idx) @@ -1896,13 +1802,12 @@ class Namer { typer: Typer => if sym.isInlineMethod then rhsCtx = rhsCtx.addMode(Mode.InlineableBody) if sym.is(ExtensionMethod) then rhsCtx = rhsCtx.addMode(Mode.InExtensionMethod) val typeParams = paramss.collect { case TypeSymbols(tparams) => tparams }.flatten - if 
(typeParams.nonEmpty) { + if (typeParams.nonEmpty) // we'll be typing an expression from a polymorphic definition's body, // so we must allow constraining its type parameters // compare with typedDefDef, see tests/pos/gadt-inference.scala rhsCtx.setFreshGADTBounds rhsCtx.gadtState.addToConstraint(typeParams) - } def typedAheadRhs(pt: Type) = PrepareInlineable.dropInlineIfError(sym, @@ -1937,14 +1842,12 @@ class Namer { typer: Typer => //if (sym.name.toString == "y") println(i"rhs = $rhsType, cooked = $cookedRhsType") if (inherited.exists) if sym.isInlineVal then lhsType else inherited - else { + else if (sym.is(Implicit)) - mdef match { + mdef match case _: DefDef => missingType(sym, "result ") case _: ValDef if sym.owner.isType => missingType(sym, "") case _ => - } lhsType orElse WildcardType - } end inferredResultType } diff --git a/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala b/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala index bde279c582e6..1432b12757ce 100644 --- a/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala +++ b/compiler/src/dotty/tools/dotc/typer/ProtoTypes.scala @@ -20,12 +20,12 @@ import TypeComparer.necessarySubType import scala.annotation.internal.sharable -object ProtoTypes { +object ProtoTypes: import tpd._ /** A trait defining an `isCompatible` method. */ - trait Compatibility { + trait Compatibility: /** Is there an implicit conversion from `tp` to `pt`? 
*/ def viewExists(tp: Type, pt: Type)(using Context): Boolean @@ -81,7 +81,7 @@ object ProtoTypes { */ def constrainResult(mt: Type, pt: Type)(using Context): Boolean = val savedConstraint = ctx.typerState.constraint - val res = pt.widenExpr match { + val res = pt.widenExpr match case pt: FunProto => mt match case mt: MethodType => @@ -101,7 +101,6 @@ object ProtoTypes { necessarilyCompatible(mt, pt) case _ => true - } if !res then ctx.typerState.constraint = savedConstraint res @@ -112,24 +111,20 @@ object ProtoTypes { * achieved by replacing expected type parameters with wildcards. */ def constrainResult(meth: Symbol, mt: Type, pt: Type)(using Context): Boolean = - if (Inlines.isInlineable(meth)) { + if (Inlines.isInlineable(meth)) constrainResult(mt, wildApprox(pt)) true - } else constrainResult(mt, pt) - } - object NoViewsAllowed extends Compatibility { + object NoViewsAllowed extends Compatibility: override def viewExists(tp: Type, pt: Type)(using Context): Boolean = false - } /** A trait for prototypes that match all types */ - trait MatchAlways extends ProtoType { + trait MatchAlways extends ProtoType: def isMatchedBy(tp1: Type, keepConstraint: Boolean)(using Context): Boolean = true def map(tm: TypeMap)(using Context): ProtoType = this def fold[T](x: T, ta: TypeAccumulator[T])(using Context): T = x override def toString: String = getClass.toString - } /** A class marking ignored prototypes that can be revealed by `deepenProto` */ abstract case class IgnoredProto(ignored: Type) extends CachedGroundType with MatchAlways: @@ -166,7 +161,7 @@ object ProtoTypes { * [ ].name: proto */ abstract case class SelectionProto(name: Name, memberProto: Type, compat: Compatibility, privateOK: Boolean) - extends CachedProxyType with ProtoType with ValueTypeOrProto { + extends CachedProxyType with ProtoType with ValueTypeOrProto: /** Is the set of members of this type unknown, in the sense that we * cannot compute a non-trivial upper approximation? 
This is the case if: @@ -249,10 +244,9 @@ object ProtoTypes { override def deepenProtoTrans(using Context): SelectionProto = derivedSelectionProto(name, memberProto.deepenProtoTrans, compat) - override def computeHash(bs: Hashable.Binders): Int = { + override def computeHash(bs: Hashable.Binders): Int = val delta = (if (compat eq NoViewsAllowed) 1 else 0) | (if (privateOK) 2 else 0) addDelta(doHash(bs, name, memberProto), delta) - } override def equals(that: Any): Boolean = that match case that: SelectionProto => @@ -260,23 +254,19 @@ object ProtoTypes { case _ => false - override def eql(that: Type): Boolean = that match { + override def eql(that: Type): Boolean = that match case that: SelectionProto => (name eq that.name) && (memberProto eq that.memberProto) && (compat eq that.compat) && (privateOK == that.privateOK) case _ => false - } - } class CachedSelectionProto(name: Name, memberProto: Type, compat: Compatibility, privateOK: Boolean) extends SelectionProto(name, memberProto, compat, privateOK) - object SelectionProto { - def apply(name: Name, memberProto: Type, compat: Compatibility, privateOK: Boolean)(using Context): SelectionProto = { + object SelectionProto: + def apply(name: Name, memberProto: Type, compat: Compatibility, privateOK: Boolean)(using Context): SelectionProto = val selproto = new CachedSelectionProto(name, memberProto, compat, privateOK) if (compat eq NoViewsAllowed) unique(selproto) else selproto - } - } /** Create a selection proto-type, but only one level deep; * treat constructors specially @@ -306,7 +296,7 @@ object ProtoTypes { trait FunOrPolyProto extends ProtoType: // common trait of PolyProto and FunProto def applyKind: ApplyKind = ApplyKind.Regular - class FunProtoState { + class FunProtoState: /** The list of typed arguments, if all arguments are typed */ var typedArgs: List[Tree] = Nil @@ -322,7 +312,6 @@ object ProtoTypes { /** If true, the application of this prototype was canceled. 
*/ var toDrop: Boolean = false - } /** A prototype for expressions that appear in function position * @@ -342,16 +331,15 @@ object ProtoTypes { override val applyKind: ApplyKind, state: FunProtoState = new FunProtoState, val constrainResultDeep: Boolean = false)(using protoCtx: Context) - extends UncachedGroundType with ApplyingProto with FunOrPolyProto { + extends UncachedGroundType with ApplyingProto with FunOrPolyProto: override def resultType(using Context): Type = resType - def isMatchedBy(tp: Type, keepConstraint: Boolean)(using Context): Boolean = { + def isMatchedBy(tp: Type, keepConstraint: Boolean)(using Context): Boolean = val args = typedArgs() def isPoly(tree: Tree) = tree.tpe.widenSingleton.isInstanceOf[PolyType] // See remark in normalizedCompatible for why we can't keep the constraint // if one of the arguments has a PolyType. typer.isApplicableType(tp, args, resultType, keepConstraint && !args.exists(isPoly)) - } def derivedFunProto( args: List[untpd.Tree] = this.args, @@ -370,11 +358,10 @@ object ProtoTypes { def allArgTypesAreCurrent()(using Context): Boolean = state.typedArg.size == args.length - private def isUndefined(tp: Type): Boolean = tp match { + private def isUndefined(tp: Type): Boolean = tp match case _: WildcardType => true case defn.FunctionOf(args, result, _) => args.exists(isUndefined) || isUndefined(result) case _ => false - } /** Did an argument produce an error when typing? This means: an error was reported * and a tree got an error type. 
Errors of adaptation whree a tree has a good type @@ -400,17 +387,16 @@ object ProtoTypes { false } - private def cacheTypedArg(arg: untpd.Tree, typerFn: untpd.Tree => Tree, force: Boolean)(using Context): Tree = { + private def cacheTypedArg(arg: untpd.Tree, typerFn: untpd.Tree => Tree, force: Boolean)(using Context): Tree = var targ = state.typedArg(arg) if (targ == null) - untpd.functionWithUnknownParamType(arg) match { + untpd.functionWithUnknownParamType(arg) match case Some(untpd.Function(args, _)) if !force => // If force = false, assume what we know about the parameter types rather than reporting an error. // That way we don't cause a "missing parameter" error in `typerFn(arg)` - val paramTypes = args map { + val paramTypes = args map: case ValDef(_, tpt, _) if !tpt.isEmpty => typer.typedType(tpt).typeOpt case _ => WildcardType - } targ = arg.withType(defn.FunctionOf(paramTypes, WildcardType)) case Some(_) if !force => targ = arg.withType(WildcardType) @@ -423,9 +409,7 @@ object ProtoTypes { else state.typedArg = state.typedArg.updated(arg, targ.nn) state.errorArgs -= arg - } targ.nn - } /** The typed arguments. This takes any arguments already typed using * `typedArg` into account. @@ -442,7 +426,7 @@ object ProtoTypes { else val passedCtx = ctx val passedTyperState = ctx.typerState - inContext(protoCtx.withUncommittedTyperState) { + inContext(protoCtx.withUncommittedTyperState): val protoTyperState = ctx.typerState val oldConstraint = protoTyperState.constraint val args1 = args.mapWithIndexConserve((arg, idx) => @@ -472,20 +456,18 @@ object ProtoTypes { // `instantiateSelected` can leave some type variables uninstantiated, // so we maximize them in a second pass. 
- newTvars.foreach { + newTvars.foreach: case tvar: TypeVar if !tvar.isInstantiated => tvar.instantiate(fromBelow = false) case _ => - } passedTyperState.mergeConstraintWith(protoTyperState)(using passedCtx) end if args1 - } /** Type single argument and remember the unadapted result in `myTypedArg`. * used to avoid repeated typings of trees when backtracking. */ - def typedArg(arg: untpd.Tree, formal: Type)(using Context): Tree = { + def typedArg(arg: untpd.Tree, formal: Type)(using Context): Tree = val wideFormal = formal.widenExpr val argCtx = if wideFormal eq formal then ctx @@ -497,9 +479,8 @@ object ProtoTypes { val targ1 = typer.adapt(targ, wideFormal, locked) if wideFormal eq formal then targ1 else checkNoWildcardCaptureForCBN(targ1) - } - def checkNoWildcardCaptureForCBN(targ1: Tree)(using Context): Tree = { + def checkNoWildcardCaptureForCBN(targ1: Tree)(using Context): Tree = if hasCaptureConversionArg(targ1.tpe) then val tp = stripCast(targ1).tpe errorTree(targ1, @@ -509,22 +490,20 @@ object ProtoTypes { |Assign it to a val and pass that instead. |""") else targ1 - } /** The type of the argument `arg`, or `NoType` if `arg` has not been typed before * or if `arg`'s typing produced a type error. 
*/ - def typeOfArg(arg: untpd.Tree)(using Context): Type = { + def typeOfArg(arg: untpd.Tree)(using Context): Type = val t = state.typedArg(arg) if (t == null) NoType else t.tpe - } /** Cache the typed argument */ def cacheArg(arg: untpd.Tree, targ: Tree) = state.typedArg = state.typedArg.updated(arg, targ) /** The same proto-type but with all arguments combined in a single tuple */ - def tupledDual: FunProto = state.tupledDual match { + def tupledDual: FunProto = state.tupledDual match case pt: FunProto => pt case _ => @@ -533,7 +512,6 @@ object ProtoTypes { case _ => untpd.Tuple(args) :: Nil state.tupledDual = new FunProto(dualArgs, resultType)(typer, applyKind) tupledDual - } /** Somebody called the `tupledDual` method of this prototype */ def hasTupledDual: Boolean = state.tupledDual.isInstanceOf[FunProto] @@ -544,10 +522,9 @@ object ProtoTypes { * a method is `toString`. If in that case the type in the denotation is * parameterless, we compensate by dropping the application. */ - def markAsDropped(): Unit = { + def markAsDropped(): Unit = assert(args.isEmpty) state.toDrop = true - } def isDropped: Boolean = state.toDrop @@ -574,7 +551,6 @@ object ProtoTypes { override def withContext(newCtx: Context): ProtoType = if newCtx `eq` protoCtx then this else new FunProto(args, resType)(typer, applyKind, state)(using newCtx) - } /** A prototype for expressions that appear in function position * @@ -592,20 +568,18 @@ object ProtoTypes { * []: argType => resultType */ abstract case class ViewProto(argType: Type, resType: Type) - extends CachedGroundType with ApplyingProto { + extends CachedGroundType with ApplyingProto: override def resultType(using Context): Type = resType def isMatchedBy(tp: Type, keepConstraint: Boolean)(using Context): Boolean = - ctx.typer.isApplicableType(tp, argType :: Nil, resultType) || { - resType match { + ctx.typer.isApplicableType(tp, argType :: Nil, resultType) `||`: + resType match case selProto @ SelectionProto(selName: TermName, 
mbrType, _, _) => ctx.typer.hasExtensionMethodNamed(tp, selName, argType, mbrType) //.reporting(i"has ext $tp $name $argType $mbrType: $result") case _ => false - } - } def derivedViewProto(argType: Type, resultType: Type)(using Context): ViewProto = if ((argType eq this.argType) && (resultType eq this.resultType)) this @@ -627,20 +601,17 @@ object ProtoTypes { override def deepenProtoTrans(using Context): ViewProto = derivedViewProto(argType, resultType.deepenProtoTrans) - } - class CachedViewProto(argType: Type, resultType: Type) extends ViewProto(argType, resultType) { + class CachedViewProto(argType: Type, resultType: Type) extends ViewProto(argType, resultType): override def computeHash(bs: Hashable.Binders): Int = doHash(bs, argType, resultType) override def eql(that: Type): Boolean = that match case that: ViewProto => (argType eq that.argType) && (resType eq that.resType) case _ => false // equals comes from case class; no need to redefine - } - object ViewProto { + object ViewProto: def apply(argType: Type, resultType: Type)(using Context): ViewProto = unique(new CachedViewProto(argType, resultType)) - } class UnapplyFunProto(argType: Type, typer: Typer)(using Context) extends FunProto( untpd.TypedSplice(dummyTreeOfType(argType)(ctx.source)) :: Nil, WildcardType)(typer, applyKind = ApplyKind.Regular) @@ -649,7 +620,7 @@ object ProtoTypes { * * [] [targs] resultType */ - case class PolyProto(targs: List[Tree], resType: Type) extends UncachedGroundType with FunOrPolyProto { + case class PolyProto(targs: List[Tree], resType: Type) extends UncachedGroundType with FunOrPolyProto: override def resultType(using Context): Type = resType @@ -681,7 +652,6 @@ object ProtoTypes { override def deepenProtoTrans(using Context): PolyProto = derivedPolyProto(targs, resultType.deepenProtoTrans) - } /** A prototype for expressions [] that are known to be functions: * @@ -712,7 +682,7 @@ object ProtoTypes { tl: TypeLambda, owningTree: untpd.Tree, alwaysAddTypeVars: Boolean, 
nestingLevel: Int = ctx.nestingLevel - ): (TypeLambda, List[TypeTree]) = { + ): (TypeLambda, List[TypeTree]) = val state = ctx.typerState val addTypeVars = alwaysAddTypeVars || !owningTree.isEmpty if (tl.isInstanceOf[PolyType]) @@ -722,18 +692,16 @@ object ProtoTypes { def newTypeVars(tl: TypeLambda): List[TypeTree] = for (paramRef <- tl.paramRefs) - yield { + yield val tt = InferredTypeTree().withSpan(owningTree.span) val tvar = TypeVar(paramRef, state, nestingLevel) state.ownedVars += tvar tt.withType(tvar) - } val added = state.constraint.ensureFresh(tl) val tvars = if (addTypeVars) newTypeVars(added) else Nil TypeComparer.addToConstraint(added, tvars.tpes.asInstanceOf[List[TypeVar]]) (added, tvars) - } def constrained(tl: TypeLambda, owningTree: untpd.Tree)(using Context): (TypeLambda, List[TypeTree]) = constrained(tl, owningTree, @@ -817,17 +785,17 @@ object ProtoTypes { * of toString method. The problem is solved by dereferencing nullary method types if the corresponding * function type is not compatible with the prototype. 
*/ - def normalize(tp: Type, pt: Type, followIFT: Boolean = true)(using Context): Type = { + def normalize(tp: Type, pt: Type, followIFT: Boolean = true)(using Context): Type = Stats.record("normalize") - tp.widenSingleton match { + tp.widenSingleton match case poly: PolyType => normalize(instantiateWithTypeVars(poly), pt) case mt: MethodType => if (mt.isImplicitMethod) normalize(resultTypeApprox(mt, wildcardOnly = true), pt) else if (mt.isResultDependent) tp - else { + else val rt = normalize(mt.resultType, pt) - pt match { + pt match case pt: IgnoredProto => tp case pt: ApplyingProto => @@ -836,22 +804,18 @@ object ProtoTypes { case _ => val ft = defn.FunctionOf(mt.paramInfos, rt) if mt.paramInfos.nonEmpty || (ft frozen_<:< pt) then ft else rt - } - } case et: ExprType => normalize(et.resultType, pt) case wtp => val iftp = defn.asContextFunctionType(wtp) if iftp.exists && followIFT then normalize(iftp.functionArgInfos.last, pt) else tp - } - } /** Approximate occurrences of parameter types and uninstantiated typevars * by wildcard types. 
*/ private def wildApprox(tp: Type, theMap: WildApproxMap | Null, seen: Set[TypeParamRef], internal: Set[TypeLambda])(using Context): Type = - tp match { + tp match case tp: NamedType => // default case, inlined for speed val isPatternBoundTypeRef = tp.isInstanceOf[TypeRef] && tp.symbol.isPatternBound if (isPatternBoundTypeRef) WildcardType(tp.underlying.bounds) @@ -859,7 +823,7 @@ object ProtoTypes { else tp.derivedSelect(wildApprox(tp.prefix, theMap, seen, internal)) case tp @ AppliedType(tycon, args) => def wildArgs = args.mapConserve(arg => wildApprox(arg, theMap, seen, internal)) - wildApprox(tycon, theMap, seen, internal) match { + wildApprox(tycon, theMap, seen, internal) match case WildcardType(TypeBounds(lo, hi)) if hi.typeParams.hasSameLengthAs(args) => val args1 = wildArgs val lo1 = if lo.typeParams.hasSameLengthAs(args) then lo.appliedTo(args1) else lo @@ -867,7 +831,6 @@ object ProtoTypes { case WildcardType(_) => WildcardType case tycon1 => tp.derivedAppliedType(tycon1, wildArgs) - } case tp: RefinedType => // default case, inlined for speed tp.derivedRefinedType( wildApprox(tp.parent, theMap, seen, internal), @@ -888,18 +851,17 @@ object ProtoTypes { def approxPoly = if (ctx.mode.is(Mode.TypevarsMissContext)) unconstrainedApprox else - ctx.typerState.constraint.entry(tp) match { + ctx.typerState.constraint.entry(tp) match case bounds: TypeBounds => wildApproxBounds(bounds) case NoType => unconstrainedApprox case inst => wildApprox(inst, theMap, seen, internal) - } approxPoly case TermParamRef(mt, pnum) => WildcardType(TypeBounds.upper(wildApprox(mt.paramInfos(pnum), theMap, seen, internal))) case tp: TypeVar => wildApprox(tp.underlying, theMap, seen, internal) case tp: AndType => - def approxAnd = { + def approxAnd = val tp1a = wildApprox(tp.tp1, theMap, seen, internal) val tp2a = wildApprox(tp.tp2, theMap, seen, internal) def wildBounds(tp: Type) = @@ -908,17 +870,15 @@ object ProtoTypes { WildcardType(wildBounds(tp1a) & wildBounds(tp2a)) else 
tp.derivedAndType(tp1a, tp2a) - } approxAnd case tp: OrType => - def approxOr = { + def approxOr = val tp1a = wildApprox(tp.tp1, theMap, seen, internal) val tp2a = wildApprox(tp.tp2, theMap, seen, internal) if (tp1a.isInstanceOf[WildcardType] || tp2a.isInstanceOf[WildcardType]) WildcardType(tp1a.bounds | tp2a.bounds) else tp.derivedOrType(tp1a, tp2a) - } approxOr case tp: SelectionProto => tp.derivedSelectionProto(tp.name, wildApprox(tp.memberProto, theMap, seen, internal), NoViewsAllowed) @@ -950,25 +910,20 @@ object ProtoTypes { case _ => (if (theMap != null && seen.eq(theMap.seen)) theMap else new WildApproxMap(seen, internal)) .mapOver(tp) - } final def wildApprox(tp: Type)(using Context): Type = wildApprox(tp, null, Set.empty, Set.empty) @sharable object AssignProto extends UncachedGroundType with MatchAlways - private[ProtoTypes] class WildApproxMap(val seen: Set[TypeParamRef], val internal: Set[TypeLambda])(using Context) extends TypeMap { + private[ProtoTypes] class WildApproxMap(val seen: Set[TypeParamRef], val internal: Set[TypeLambda])(using Context) extends TypeMap: def apply(tp: Type): Type = wildApprox(tp, this, seen, internal) - } /** Dummy tree to be used as an argument of a FunProto or ViewProto type */ - object dummyTreeOfType { + object dummyTreeOfType: def apply(tp: Type)(implicit src: SourceFile): Tree = untpd.Literal(Constant(null)) withTypeUnchecked tp - def unapply(tree: untpd.Tree): Option[Type] = untpd.unsplice(tree) match { + def unapply(tree: untpd.Tree): Option[Type] = untpd.unsplice(tree) match case tree @ Literal(Constant(null)) => Some(tree.typeOpt) case _ => None - } - } private val sameTree = (t: untpd.Tree, n: Int) => t -} diff --git a/compiler/src/dotty/tools/dotc/typer/QuotesAndSplices.scala b/compiler/src/dotty/tools/dotc/typer/QuotesAndSplices.scala index 070449e3ee96..40ea91998d79 100644 --- a/compiler/src/dotty/tools/dotc/typer/QuotesAndSplices.scala +++ b/compiler/src/dotty/tools/dotc/typer/QuotesAndSplices.scala @@ -27,7 
+27,7 @@ import scala.collection.mutable /** Type quotes `'{ ... }` and splices `${ ... }` */ -trait QuotesAndSplices { +trait QuotesAndSplices: self: Typer => import tpd._ @@ -35,13 +35,12 @@ trait QuotesAndSplices { /** Translate `'{ e }` into `scala.quoted.Expr.apply(e)` and `'[T]` into `scala.quoted.Type.apply[T]` * while tracking the quotation level in the context. */ - def typedQuote(tree: untpd.Quote, pt: Type)(using Context): Tree = { + def typedQuote(tree: untpd.Quote, pt: Type)(using Context): Tree = record("typedQuote") - tree.body match { + tree.body match case _: untpd.Splice if tree.isTerm && !ctx.mode.is(Mode.Pattern) => report.warning("Canceled splice directly inside a quote. '{ ${ XYZ } } is equivalent to XYZ.", tree.srcPos) case _ => - } val quotes = inferImplicitArg(defn.QuotesClass.typeRef, tree.span) if quotes.tpe.isInstanceOf[SearchFailureType] then @@ -64,25 +63,22 @@ trait QuotesAndSplices { val quotedExpr = typedApply(exprQuoteTree, pt)(using quoteContext) match case Apply(TypeApply(fn, tpt :: Nil), quotedExpr :: Nil) => untpd.Quote(quotedExpr, Nil).withBodyType(tpt.tpe) makeInlineable(quotedExpr.select(nme.apply).appliedTo(quotes).withSpan(tree.span)) - } private def makeInlineable(tree: Tree)(using Context): Tree = - inContext(ctx.withOwner(ctx.owner.skipLocalOwners)) { + inContext(ctx.withOwner(ctx.owner.skipLocalOwners)): PrepareInlineable.makeInlineable(tree) - } /** Translate `${ t: Expr[T] }` into expression `t.splice` while tracking the quotation level in the context */ - def typedSplice(tree: untpd.Splice, pt: Type)(using Context): Tree = { + def typedSplice(tree: untpd.Splice, pt: Type)(using Context): Tree = record("typedSplice") checkSpliceOutsideQuote(tree) assert(!ctx.mode.is(Mode.QuotedPattern)) - tree.expr match { + tree.expr match case untpd.Quote(innerExpr, Nil) if innerExpr.isTerm => report.warning("Canceled quote directly inside a splice. 
${ '{ XYZ } } is equivalent to XYZ.", tree.srcPos) return typed(innerExpr, pt) case _ => - } - if (level == 0) { + if (level == 0) // Mark the first inline method from the context as a macro def markAsMacro(c: Context): Unit = if (c.owner eq c.outer.owner) markAsMacro(c.outer) @@ -90,7 +86,6 @@ trait QuotesAndSplices { else if (!c.outer.owner.is(Package)) markAsMacro(c.outer) else assert(ctx.reporter.hasErrors) // Did not find inline def to mark as macro markAsMacro(ctx) - } // TODO typecheck directly (without `exprSplice`) val internalSplice = @@ -99,20 +94,18 @@ trait QuotesAndSplices { case tree @ Apply(TypeApply(_, tpt :: Nil), spliced :: Nil) if tree.symbol == defn.QuotedRuntime_exprSplice => cpy.Splice(tree)(spliced) case tree => tree - } - def typedSplicePattern(tree: untpd.SplicePattern, pt: Type)(using Context): Tree = { + def typedSplicePattern(tree: untpd.SplicePattern, pt: Type)(using Context): Tree = record("typedSplicePattern") if isFullyDefined(pt, ForceDegree.flipBottom) then def patternOuterContext(ctx: Context): Context = if (ctx.mode.is(Mode.QuotedPattern)) patternOuterContext(ctx.outer) else ctx - val typedArgs = tree.args.map { + val typedArgs = tree.args.map: case arg: untpd.Ident => typedExpr(arg) case arg => report.error("Open pattern expected an identifier", arg.srcPos) EmptyTree - } for arg <- typedArgs if arg.symbol.is(Mutable) do // TODO support these patterns. 
Possibly using scala.quoted.util.Var report.error("References to `var`s cannot be used in higher-order pattern", arg.srcPos) val argTypes = typedArgs.map(_.tpe.widenTermRefExpr) @@ -124,7 +117,6 @@ trait QuotesAndSplices { untpd.cpy.SplicePattern(tree)(pat, typedArgs).withType(pt) else errorTree(tree, em"Type must be fully defined.\nConsider annotating the splice using a type ascription:\n ($tree: XYZ).", tree.body.srcPos) - } def typedHole(tree: untpd.Hole, pt: Type)(using Context): Tree = throw new UnsupportedOperationException("cannot type check a Hole node") @@ -136,7 +128,7 @@ trait QuotesAndSplices { * `Ti` is the type of the argument `argi` and R if the type of the prototype. * The prototype must be fully defined to be able to infer the type of `R`. */ - def typedAppliedSplice(tree: untpd.Apply, pt: Type)(using Context): Tree = { + def typedAppliedSplice(tree: untpd.Apply, pt: Type)(using Context): Tree = assert(ctx.mode.is(Mode.QuotedPattern)) val untpd.Apply(splice: untpd.SplicePattern, args) = tree: @unchecked def isInBraces: Boolean = splice.span.end != splice.body.span.end @@ -149,7 +141,6 @@ trait QuotesAndSplices { if args.isEmpty then report.error("Missing arguments for open pattern", tree.srcPos) typedSplicePattern(untpd.cpy.SplicePattern(tree)(splice.body, args), pt) - } /** Type a pattern variable name `t` in quote pattern as `${given t$giveni: Type[t @ _]}`. * The resulting pattern is the split in `splitQuotePattern`. 
@@ -199,7 +190,7 @@ trait QuotesAndSplices { * ) * ``` */ - private def splitQuotePattern(quoted: Tree)(using Context): (Map[Symbol, Bind], Tree, List[Tree]) = { + private def splitQuotePattern(quoted: Tree)(using Context): (Map[Symbol, Bind], Tree, List[Tree]) = val ctx0 = ctx val typeBindings: collection.mutable.Map[Symbol, Bind] = collection.mutable.Map.empty @@ -210,22 +201,21 @@ trait QuotesAndSplices { Bind(bsym, untpd.Ident(nme.WILDCARD).withType(bindingBounds)).withSpan(quoted.span) }) - object splitter extends tpd.TreeMap { + object splitter extends tpd.TreeMap: private var variance: Int = 1 - inline private def atVariance[T](v: Int)(op: => T): T = { + inline private def atVariance[T](v: Int)(op: => T): T = val saved = variance variance = v val res = op variance = saved res - } val patBuf = new mutable.ListBuffer[Tree] val freshTypePatBuf = new mutable.ListBuffer[Tree] val freshTypeBindingsBuff = new mutable.ListBuffer[Tree] val typePatBuf = new mutable.ListBuffer[Tree] - override def transform(tree: Tree)(using Context) = tree match { + override def transform(tree: Tree)(using Context) = tree match case Typed(splice @ SplicePattern(pat, Nil), tpt) if !tpt.tpe.derivesFrom(defn.RepeatedParamClass) => transform(tpt) // Collect type bindings transform(splice) @@ -256,10 +246,9 @@ trait QuotesAndSplices { super.transform(tree) case tree @ AppliedTypeTree(tpt, args) => val args1: List[Tree] = args.zipWithConserve(tpt.tpe.typeParams.map(_.paramVarianceSign)) { (arg, v) => - arg.tpe match { + arg.tpe match case _: TypeBounds => transform(arg) case _ => atVariance(variance * v)(transform(arg)) - } } cpy.AppliedTypeTree(tree)(transform(tpt), args1) case tree: NamedDefTree => @@ -281,9 +270,8 @@ trait QuotesAndSplices { EmptyTree case _ => super.transform(tree) - } - private def transformTypeBindingTypeDef(nameOfSyntheticGiven: TermName, tdef: TypeDef, buff: mutable.Builder[Tree, List[Tree]])(using Context): Tree = { + private def 
transformTypeBindingTypeDef(nameOfSyntheticGiven: TermName, tdef: TypeDef, buff: mutable.Builder[Tree, List[Tree]])(using Context): Tree = if ctx.mode.is(Mode.InPatternAlternative) then report.error(IllegalVariableInPatternAlternative(tdef.symbol.name), tdef.srcPos) if variance == -1 then @@ -293,8 +281,6 @@ trait QuotesAndSplices { val sym = newPatternBoundSymbol(nameOfSyntheticGiven, bindingTypeTpe, tdef.span, flags = ImplicitVal)(using ctx0) buff += Bind(sym, untpd.Ident(nme.WILDCARD).withType(bindingTypeTpe)).withSpan(tdef.span) super.transform(tdef) - } - } val shape0 = splitter.transform(quoted) val patterns = (splitter.freshTypePatBuf.iterator ++ splitter.typePatBuf.iterator ++ splitter.patBuf.iterator).toList val freshTypeBindings = splitter.freshTypeBindingsBuff.result() @@ -305,22 +291,18 @@ trait QuotesAndSplices { ) val shape2 = if (freshTypeBindings.isEmpty) shape1 - else { + else val isFreshTypeBindings = freshTypeBindings.map(_.symbol).toSet - val typeMap = new TypeMap() { - def apply(tp: Type): Type = tp match { + val typeMap = new TypeMap(): + def apply(tp: Type): Type = tp match case tp: TypeRef if tp.symbol.isTypeSplice => val tp1 = tp.dealias if (isFreshTypeBindings(tp1.typeSymbol)) tp1 else tp case tp => mapOver(tp) - } - } new TreeTypeMap(typeMap = typeMap).transform(shape1) - } (typeBindings.toMap, shape2, patterns) - } /** Type a quote pattern `case '{ } =>` qiven the a current prototype. Typing the pattern * will also transform it into a call to `scala.internal.quoted.Expr.unapply`. @@ -367,7 +349,7 @@ trait QuotesAndSplices { * ) => ... 
* ``` */ - private def typedQuotePattern(tree: untpd.Quote, pt: Type, quotes: Tree)(using Context): Tree = { + private def typedQuotePattern(tree: untpd.Quote, pt: Type, quotes: Tree)(using Context): Tree = val quoted = tree.body if quoted.isTerm && !pt.derivesFrom(defn.QuotedExprClass) then report.error("Quote pattern can only match scrutinees of type scala.quoted.Expr", tree.srcPos) @@ -375,10 +357,9 @@ trait QuotesAndSplices { report.error("Quote pattern can only match scrutinees of type scala.quoted.Type", tree.srcPos) val exprPt = pt.baseType(if quoted.isType then defn.QuotedTypeClass else defn.QuotedExprClass) - val quotedPt = exprPt.argInfos.headOption match { + val quotedPt = exprPt.argInfos.headOption match case Some(argPt: ValueType) => argPt // excludes TypeBounds case _ => defn.AnyType - } val quoted0 = desugar.quotedPattern(quoted, untpd.TypedSplice(TypeTree(quotedPt))) val quoteCtx = quoteContext.addMode(Mode.QuotedPattern).retractMode(Mode.Pattern) val quoted1 = @@ -387,23 +368,21 @@ trait QuotesAndSplices { val (typeBindings, shape, splices) = splitQuotePattern(quoted1) - class ReplaceBindings extends TypeMap() { - override def apply(tp: Type): Type = tp match { + class ReplaceBindings extends TypeMap(): + override def apply(tp: Type): Type = tp match case tp: TypeRef => val tp1 = if (tp.symbol.isTypeSplice) tp.dealias else tp mapOver(typeBindings.get(tp1.typeSymbol).fold(tp)(_.symbol.typeRef)) case tp => mapOver(tp) - } - } val replaceBindings = new ReplaceBindings val patType = defn.tupleType(splices.tpes.map(tpe => replaceBindings(tpe.widen))) val typeBindingsTuple = tpd.hkNestedPairsTypeTree(typeBindings.values.toList) - val replaceBindingsInTree = new TreeMap { + val replaceBindingsInTree = new TreeMap: private var bindMap = Map.empty[Symbol, Symbol] override def transform(tree: tpd.Tree)(using Context): tpd.Tree = - tree match { + tree match case tree: Bind => val sym = tree.symbol val newInfo = replaceBindings(sym.info) @@ -412,14 +391,10 @@ 
trait QuotesAndSplices { Bind(newSym, transform(tree.body)).withSpan(sym.span) case _ => super.transform(tree).withType(replaceBindingsInType(tree.tpe)) - } - private val replaceBindingsInType = new ReplaceBindings { - override def apply(tp: Type): Type = tp match { + private val replaceBindingsInType = new ReplaceBindings: + override def apply(tp: Type): Type = tp match case tp: TermRef => bindMap.get(tp.termSymbol).fold[Type](tp)(_.typeRef) case tp => super.apply(tp) - } - } - } val splicePat = if splices.isEmpty then ref(defn.EmptyTupleModule.termRef) @@ -438,5 +413,3 @@ trait QuotesAndSplices { implicits = quotedPattern :: Nil, patterns = splicePat :: Nil, proto = quoteClass.typeRef.appliedTo(replaceBindings(quoted1.tpe) & quotedPt)) - } -} diff --git a/compiler/src/dotty/tools/dotc/typer/ReTyper.scala b/compiler/src/dotty/tools/dotc/typer/ReTyper.scala index 1fa6e967fbe1..c765c0a6c817 100644 --- a/compiler/src/dotty/tools/dotc/typer/ReTyper.scala +++ b/compiler/src/dotty/tools/dotc/typer/ReTyper.scala @@ -22,26 +22,24 @@ import staging.StagingLevel.* * * Otherwise, everything is as in Typer. 
*/ -class ReTyper(nestingLevel: Int = 0) extends Typer(nestingLevel) with ReChecking { +class ReTyper(nestingLevel: Int = 0) extends Typer(nestingLevel) with ReChecking: import tpd._ private def assertTyped(tree: untpd.Tree)(using Context): Unit = assert(tree.hasType, i"$tree ${tree.getClass} ${tree.uniqueId}") /** Checks that the given tree has been typed */ - protected def promote(tree: untpd.Tree)(using Context): tree.ThisTree[Type] = { + protected def promote(tree: untpd.Tree)(using Context): tree.ThisTree[Type] = assertTyped(tree) tree.withType(tree.typeOpt) - } override def typedIdent(tree: untpd.Ident, pt: Type)(using Context): Tree = promote(tree) - override def typedSelect(tree: untpd.Select, pt: Type)(using Context): Tree = { + override def typedSelect(tree: untpd.Select, pt: Type)(using Context): Tree = assertTyped(tree) val qual1 = withoutMode(Mode.Pattern)(typed(tree.qualifier, AnySelectionProto)) untpd.cpy.Select(tree)(qual1, tree.name).withType(tree.typeOpt) - } override def typedLiteral(tree: untpd.Literal)(implicit ctc: Context): Tree = promote(tree) @@ -55,18 +53,16 @@ class ReTyper(nestingLevel: Int = 0) extends Typer(nestingLevel) with ReChecking override def typedImport(tree: untpd.Import)(using Context): Tree = promote(tree) - override def typedTyped(tree: untpd.Typed, pt: Type)(using Context): Tree = { + override def typedTyped(tree: untpd.Typed, pt: Type)(using Context): Tree = assertTyped(tree) val tpt1 = checkSimpleKinded(typedType(tree.tpt)) - val expr1 = tree.expr match { + val expr1 = tree.expr match case id: untpd.Ident if (ctx.mode is Mode.Pattern) && untpd.isVarPattern(id) && (id.name == nme.WILDCARD || id.name == nme.WILDCARD_STAR) => tree.expr.withType(tpt1.tpe) case _ => typed(tree.expr) - } val result = untpd.cpy.Typed(tree)(expr1, tpt1).withType(tree.typeOpt) if ctx.mode.isExpr then result.withNotNullInfo(expr1.notNullInfo) else result - } override def typedTypeTree(tree: untpd.TypeTree, pt: Type)(using Context): TypeTree = 
promote(tree) @@ -77,20 +73,18 @@ class ReTyper(nestingLevel: Int = 0) extends Typer(nestingLevel) with ReChecking override def typedExport(exp: untpd.Export)(using Context): Export = promote(exp) - override def typedBind(tree: untpd.Bind, pt: Type)(using Context): Bind = { + override def typedBind(tree: untpd.Bind, pt: Type)(using Context): Bind = assertTyped(tree) val body1 = typed(tree.body, pt) untpd.cpy.Bind(tree)(tree.name, body1).withType(tree.typeOpt) - } - override def typedUnApply(tree: untpd.UnApply, selType: Type)(using Context): UnApply = { + override def typedUnApply(tree: untpd.UnApply, selType: Type)(using Context): UnApply = val fun1 = // retract PatternOrTypeBits like in typedExpr withoutMode(Mode.PatternOrTypeBits)(typedUnadapted(tree.fun, AnyFunctionProto)) val implicits1 = tree.implicits.map(typedExpr(_)) val patterns1 = tree.patterns.mapconserve(pat => typed(pat, pat.typeOpt)) untpd.cpy.UnApply(tree)(fun1, implicits1, patterns1).withType(tree.typeOpt) - } override def typedUnApply(tree: untpd.Apply, selType: Type)(using Context): Tree = typedApply(tree, selType) @@ -131,13 +125,12 @@ class ReTyper(nestingLevel: Int = 0) extends Typer(nestingLevel) with ReChecking override def ensureConstrCall(cls: ClassSymbol, parent: Tree, psym: Symbol)(using Context): Tree = parent - override def handleUnexpectedFunType(tree: untpd.Apply, fun: Tree)(using Context): Tree = fun.tpe match { + override def handleUnexpectedFunType(tree: untpd.Apply, fun: Tree)(using Context): Tree = fun.tpe match case mt: MethodType => val args: List[Tree] = tree.args.zipWithConserve(mt.paramInfos)(typedExpr) assignType(untpd.cpy.Apply(tree)(fun, args), fun, args) case _ => super.handleUnexpectedFunType(tree, fun) - } override def addCanThrowCapabilities(expr: untpd.Tree, cases: List[CaseDef])(using Context): untpd.Tree = expr @@ -161,4 +154,3 @@ class ReTyper(nestingLevel: Int = 0) extends Typer(nestingLevel) with ReChecking override protected def checkEqualityEvidence(tree: 
tpd.Tree, pt: Type)(using Context): Unit = () override protected def matchingApply(methType: MethodOrPoly, pt: FunProto)(using Context): Boolean = true override protected def typedScala2MacroBody(call: untpd.Tree)(using Context): Tree = promote(call) -} diff --git a/compiler/src/dotty/tools/dotc/typer/RefChecks.scala b/compiler/src/dotty/tools/dotc/typer/RefChecks.scala index fe28a8b18833..874c194539df 100644 --- a/compiler/src/dotty/tools/dotc/typer/RefChecks.scala +++ b/compiler/src/dotty/tools/dotc/typer/RefChecks.scala @@ -21,19 +21,18 @@ import config.Printers.refcheck import reporting._ import Constants.Constant -object RefChecks { +object RefChecks: import tpd._ val name: String = "refchecks" val description: String = "checks related to abstract members and overriding" - private val defaultMethodFilter = new NameFilter { + private val defaultMethodFilter = new NameFilter: def apply(pre: Type, name: Name)(using Context): Boolean = name.is(DefaultGetterName) def isStable = true - } /** Only one overloaded alternative is allowed to define default arguments */ - private def checkOverloadedRestrictions(clazz: Symbol)(using Context): Unit = { + private def checkOverloadedRestrictions(clazz: Symbol)(using Context): Unit = // Using the default getters (such as methodName$default$1) as a cheap way of // finding methods with default parameters. This way, we can limit the members to // those with the DEFAULTPARAM flag, and infer the methods. 
Looking for the methods @@ -44,16 +43,15 @@ object RefChecks { defaultGetterClass <- List(clazz, clazz.companionModule.moduleClass); if defaultGetterClass.isClass } - { val defaultGetterNames = defaultGetterClass.asClass.memberNames(defaultMethodFilter) val defaultMethodNames = defaultGetterNames map { _ replace { case DefaultGetterName(methName, _) => methName }} - for (name <- defaultMethodNames) { + for (name <- defaultMethodNames) val methods = clazz.info.member(name).alternatives.map(_.symbol) val haveDefaults = methods.filter(_.hasDefaultParams) - if (haveDefaults.length > 1) { + if (haveDefaults.length > 1) val owners = haveDefaults map (_.owner) // constructors of different classes are allowed to have defaults if (haveDefaults.exists(x => !x.isConstructor) || owners.distinct.size < haveDefaults.size) @@ -62,16 +60,12 @@ object RefChecks { if owners.forall(_ == clazz) then "." else i".\nThe members with defaults are defined in ${owners.map(_.showLocated).mkString("", " and ", ".")}"}", clazz.srcPos) - } - } - } // Check for doomed attempt to overload applyDynamic if (clazz derivesFrom defn.DynamicClass) for (case (_, m1 :: m2 :: _) <- (clazz.info member nme.applyDynamic).alternatives groupBy (_.symbol.typeParams.length)) report.error("implementation restriction: applyDynamic cannot be overloaded except by methods with different numbers of type parameters, e.g. applyDynamic[T1](method: String)(arg: T1) and applyDynamic[T1, T2](method: String)(arg1: T1, arg2: T2)", m1.symbol.srcPos) - } /** The this-type of `cls` which should be used when looking at the types of * inherited members. If `cls` has a non-trivial self type, this returns a skolem @@ -82,12 +76,11 @@ object RefChecks { * This one used to succeed only if forwarding parameters is on. * (Forwarding tends to hide problems by binding parameter names). 
*/ - private def upwardsThisType(cls: Symbol)(using Context) = cls.info match { + private def upwardsThisType(cls: Symbol)(using Context) = cls.info match case ClassInfo(_, _, _, _, tp: Type) if (tp ne cls.typeRef) && !cls.isOneOf(FinalOrModuleClass) => SkolemType(cls.appliedRef).withName(nme.this_) case _ => cls.thisType - } /** - Check that self type of `cls` conforms to self types of all `parents` as seen from * `cls.thisType` @@ -99,7 +92,7 @@ object RefChecks { * is the intersection of the capture sets of all its parents */ def checkSelfAgainstParents(cls: ClassSymbol, parents: List[Symbol])(using Context): Unit = - withMode(Mode.CheckBoundsOrSelfType) { + withMode(Mode.CheckBoundsOrSelfType): val cinfo = cls.classInfo def checkSelfConforms(other: ClassSymbol) = @@ -111,14 +104,13 @@ object RefChecks { for psym <- parents do checkSelfConforms(psym.asClass) - } end checkSelfAgainstParents /** Check that self type of this class conforms to self types of parents * and required classes. Also check that only `enum` constructs extend * `java.lang.Enum` and no user-written class extends ContextFunctionN. */ - def checkParents(cls: Symbol, parentTrees: List[Tree])(using Context): Unit = cls.info match { + def checkParents(cls: Symbol, parentTrees: List[Tree])(using Context): Unit = cls.info match case cinfo: ClassInfo => val psyms = cls.asClass.parentSyms checkSelfAgainstParents(cls.asClass, psyms) @@ -135,14 +127,13 @@ object RefChecks { // however the no-arg constructor is forbidden, we must look at the parent trees to see // which overload is called. val javaEnumCtor = defn.JavaEnumClass.primaryConstructor - parentTrees.exists { + parentTrees.exists: case parent @ tpd.Apply(tpd.TypeApply(fn, _), _) if fn.tpe.termSymbol eq javaEnumCtor => // here we are simulating the error for missing arguments to a constructor. 
report.error(JavaEnumParentArgs(parent.tpe), cls.sourcePos) true case _ => false - } if psyms.exists(defn.isContextFunctionClass) then report.error(CannotExtendContextFunction(cls), cls.sourcePos) @@ -168,7 +159,6 @@ object RefChecks { for case app: Apply <- parentTrees do checkParamInits(app) case _ => - } /** Disallow using trait parameters as prefix for its parents. * @@ -176,19 +166,18 @@ object RefChecks { * Otherwise, outer NPE may happen, see tests/neg/i5083.scala */ private def checkParentPrefix(cls: Symbol, parent: Tree)(using Context): Unit = - parent.tpe.typeConstructor match { + parent.tpe.typeConstructor match case TypeRef(ref: TermRef, _) => val paramRefs = ref.namedPartsWith(ntp => ntp.symbol.enclosingClass == cls) if (paramRefs.nonEmpty) report.error(TraitParameterUsedAsParentPrefix(cls), parent.srcPos) case _ => - } /** Check that a class and its companion object to not both define * a class or module with same name */ private def checkCompanionNameClashes(cls: Symbol)(using Context): Unit = - if (!cls.owner.is(ModuleClass)) { + if (!cls.owner.is(ModuleClass)) def clashes(sym: Symbol) = sym.isClass && sym.name.stripModuleClassSuffix == cls.name.stripModuleClassSuffix @@ -197,7 +186,6 @@ object RefChecks { others.foreach { other => report.error(ClassAndCompanionNameClash(cls, other), cls.srcPos) } - } // Override checking ------------------------------------------------------------ @@ -298,7 +286,7 @@ object RefChecks { * @param makeOverridingPairsChecker A function for creating a OverridePairsChecker instance * from the class symbol and the self type */ - def checkAllOverrides(clazz: ClassSymbol, makeOverridingPairsChecker: ((ClassSymbol, Type) => Context ?=> OverridingPairsChecker) | Null = null)(using Context): Unit = { + def checkAllOverrides(clazz: ClassSymbol, makeOverridingPairsChecker: ((ClassSymbol, Type) => Context ?=> OverridingPairsChecker) | Null = null)(using Context): Unit = val self = clazz.thisType val upwardsSelf = 
upwardsThisType(clazz) var hasErrors = false @@ -308,21 +296,19 @@ object RefChecks { val mixinOverrideErrors = new mutable.ListBuffer[MixinOverrideError]() def printMixinOverrideErrors(): Unit = - mixinOverrideErrors.toList match { + mixinOverrideErrors.toList match case Nil => case List(MixinOverrideError(_, msg)) => report.error(msg, clazz.srcPos) case MixinOverrideError(member, msg) :: others => val others1 = others.map(_.member).filter(_.name != member.name).distinct - def othersMsg = { + def othersMsg = val others1 = others.map(_.member) .filter(_.name != member.name) .map(_.show).distinct if (others1.isEmpty) "" else i";\nother members with override errors are:: $others1%, %" - } report.error(msg.append(othersMsg), clazz.srcPos) - } def infoString(sym: Symbol) = err.infoString(sym, self, showLocation = sym.owner != clazz) @@ -371,7 +357,7 @@ object RefChecks { * Type members are always assumed to match. */ def trueMatch: Boolean = - member.isType || withMode(Mode.IgnoreCaptures) { + member.isType || withMode(Mode.IgnoreCaptures): // `matches` does not perform box adaptation so the result here would be // spurious during capture checking. // @@ -380,16 +366,14 @@ object RefChecks { // This should be safe since the compatibility under box adaptation is already // checked. memberTp(self).matches(otherTp(self)) - } def emitOverrideError(fullmsg: Message) = - if (!(hasErrors && member.is(Synthetic) && member.is(Module))) { + if (!(hasErrors && member.is(Synthetic) && member.is(Module))) // suppress errors relating toi synthetic companion objects if other override // errors (e.g. relating to the companion class) have already been reported. 
if (member.owner == clazz) report.error(fullmsg, member.srcPos) else mixinOverrideErrors += new MixinOverrideError(member, fullmsg) hasErrors = true - } def overrideError(msg: String, compareTypes: Boolean = false) = if trueMatch && noErrorType then @@ -405,13 +389,12 @@ object RefChecks { desugar.isDesugaredCaseClassMethodName(member.name) || // such names are added automatically, can't have an override preset. sym.is(Module)) // synthetic companion - def overrideAccessError() = { + def overrideAccessError() = report.log(i"member: ${member.showLocated} ${member.flagsString}") // DEBUG report.log(i"other: ${other.showLocated} ${other.flagsString}") // DEBUG val otherAccess = (other.flags & AccessFlags).flagsString overrideError("has weaker access privileges; it should be " + (if (otherAccess == "") "public" else "at least " + otherAccess)) - } def overrideTargetNameError() = val otherTargetName = i"@targetName(${other.targetName})" @@ -425,10 +408,9 @@ object RefChecks { //Console.println(infoString(member) + " overrides " + infoString(other) + " in " + clazz);//DEBUG /* Is the intersection between given two lists of overridden symbols empty? */ - def intersectionIsEmpty(syms1: Iterator[Symbol], syms2: Iterator[Symbol]) = { + def intersectionIsEmpty(syms1: Iterator[Symbol], syms2: Iterator[Symbol]) = val set2 = syms2.toSet !(syms1 exists (set2 contains _)) - } // o: public | protected | package-protected (aka java's default access) // ^-may be overridden by member with access privileges-v @@ -553,14 +535,14 @@ object RefChecks { printMixinOverrideErrors() // Verifying a concrete class has nothing unimplemented. 
- if (!clazz.isOneOf(AbstractOrTrait)) { + if (!clazz.isOneOf(AbstractOrTrait)) val abstractErrors = new mutable.ListBuffer[String] def abstractErrorMessage = // a little formatting polish if (abstractErrors.size <= 2) abstractErrors.mkString(" ") else abstractErrors.tail.mkString(abstractErrors.head + ":\n", "\n", "") - def abstractClassError(mustBeMixin: Boolean, msg: String): Unit = { + def abstractClassError(mustBeMixin: Boolean, msg: String): Unit = def prelude = ( if (clazz.isAnonymousClass || clazz.is(Module)) "object creation impossible" else if (mustBeMixin) s"$clazz needs to be a mixin" @@ -570,17 +552,15 @@ object RefChecks { if (abstractErrors.isEmpty) abstractErrors ++= List(prelude, msg) else abstractErrors += msg - } def hasJavaErasedOverriding(sym: Symbol): Boolean = !erasurePhase.exists || // can't do the test, assume the best - atPhase(erasurePhase.next) { + atPhase(erasurePhase.next): clazz.info.nonPrivateMember(sym.name).hasAltWith { alt => alt.symbol.is(JavaDefined, butNot = Deferred) && !sym.owner.derivesFrom(alt.symbol.owner) && alt.matches(sym) } - } def ignoreDeferred(mbr: Symbol) = mbr.isType @@ -609,20 +589,19 @@ object RefChecks { buf.toList // 2. Check that only abstract classes have deferred members - def checkNoAbstractMembers(): Unit = { + def checkNoAbstractMembers(): Unit = // Avoid spurious duplicates: first gather any missing members. val missing = missingTermSymbols // Group missing members by the name of the underlying symbol, // to consolidate getters and setters. 
val grouped = missing.groupBy(_.underlyingSymbol.name) - val missingMethods = grouped.toList flatMap { + val missingMethods = grouped.toList flatMap: case (name, syms) => val withoutSetters = syms filterNot (_.isSetter) if (withoutSetters.nonEmpty) withoutSetters else syms - } - def stubImplementations: List[String] = { + def stubImplementations: List[String] = // Grouping missing methods by the declaring class val regrouped = missingMethods.groupBy(_.owner).toList def membersStrings(members: List[Symbol]) = @@ -634,11 +613,10 @@ object RefChecks { case (owner, members) => ("// Members declared in " + owner.fullName) +: membersStrings(members) :+ "" }).init - } // If there are numerous missing methods, we presume they are aware of it and // give them a nicely formatted set of method signatures for implementing. - if (missingMethods.size > 1) { + if (missingMethods.size > 1) abstractClassError(false, "it has " + missingMethods.size + " unimplemented members.") val preface = """|/** As seen from %s, the missing signatures are as follows. @@ -647,9 +625,8 @@ object RefChecks { |""".stripMargin.format(clazz) abstractErrors += stubImplementations.map(" " + _ + "\n").mkString(preface, "", "") return - } - for (member <- missing) { + for (member <- missing) def showDclAndLocation(sym: Symbol) = s"${sym.showDcl} in ${sym.owner.showLocated}" def undefined(msg: String) = @@ -658,7 +635,7 @@ object RefChecks { // Give a specific error message for abstract vars based on why it fails: // It could be unimplemented, have only one accessor, or be uninitialized. - if (underlying.is(Mutable)) { + if (underlying.is(Mutable)) val isMultiple = grouped.getOrElse(underlying.name, Nil).size > 1 // If both getter and setter are missing, squelch the setter error. 
@@ -667,8 +644,7 @@ object RefChecks { if (member.isSetter) "\n(Note that an abstract var requires a setter in addition to the getter)" else if (member.isGetter && !isMultiple) "\n(Note that an abstract var requires a getter in addition to the setter)" else err.abstractVarMessage(member)) - } - else if (underlying.is(Method)) { + else if (underlying.is(Method)) // If there is a concrete method whose name matches the unimplemented // abstract method, and a cursory examination of the difference reveals // something obvious to us, let's make it more obvious to them. @@ -679,13 +655,13 @@ object RefChecks { m.info.firstParamTypes.length == abstractParams.length } - matchingArity match { + matchingArity match // So far so good: only one candidate method case concrete :: Nil => val mismatches = abstractParams.zip(concrete.info.firstParamTypes) .filterNot { case (x, y) => x =:= y } - mismatches match { + mismatches match // Only one mismatched parameter: say something useful. case (pa, pc) :: Nil => val abstractSym = pa.typeSymbol @@ -694,13 +670,12 @@ object RefChecks { s"${c1.showLocated} is a subclass of ${c2.showLocated}, but method parameter types must match exactly." val addendum = if (abstractSym == concreteSym) - (pa.typeConstructor, pc.typeConstructor) match { + (pa.typeConstructor, pc.typeConstructor) match case (TypeRef(pre1, _), TypeRef(pre2, _)) => if (pre1 =:= pre2) "their type parameters differ" else "their prefixes (i.e. 
enclosing instances) differ" case _ => "" - } else if (abstractSym isSubClass concreteSym) subclassMsg(abstractSym, concreteSym) else if (concreteSym isSubClass abstractSym) @@ -716,18 +691,13 @@ object RefChecks { undefined( if concrete.symbol.is(AbsOverride) then s"\n(The class implements ${showDclAndLocation(concrete.symbol)} but that definition still needs an implementation)" - else + else s"\n(The class implements a member with a different type: ${showDclAndLocation(concrete.symbol)})") - } case Nil => undefined("") case concretes => undefined(s"\n(The class implements members with different types: ${concretes.map(c => showDclAndLocation(c.symbol))}%\n %)") - } - } else undefined("") - } - } // 3. Check that concrete classes do not have deferred definitions // that are not implemented in a subclass. @@ -737,9 +707,9 @@ object RefChecks { // class D extends C { def m: Int } // // (3) is violated but not (2). - def checkNoAbstractDecls(bc: Symbol): Unit = { + def checkNoAbstractDecls(bc: Symbol): Unit = for (decl <- bc.info.decls) - if (decl.is(Deferred)) { + if (decl.is(Deferred)) val impl = withMode(Mode.IgnoreCaptures)(decl.matchingMember(clazz.thisType)) if (impl == NoSymbol || decl.owner.isSubClass(impl.owner)) && !ignoreDeferred(decl) @@ -749,17 +719,15 @@ object RefChecks { report.log(i"${clazz.thisType.memberInfo(decl)}") // DEBUG abstractClassError(false, "there is a deferred declaration of " + infoString(decl) + " which is not implemented in a subclass" + err.abstractVarMessage(decl)) - } if (bc.asClass.superClass.is(Abstract)) checkNoAbstractDecls(bc.asClass.superClass) - } // Check that every term member of this concrete class has a symbol that matches the member's type // Member types are computed by intersecting the types of all members that have the same name // and signature. But a member selection will pick one particular implementation, according to // the rules of overriding and linearization. 
This method checks that the implementation has indeed // a type that subsumes the full member type. - def checkMemberTypesOK() = { + def checkMemberTypesOK() = // First compute all member names we need to check in `membersToCheck`. // We do not check @@ -772,7 +740,7 @@ object RefChecks { val membersToCheck = new util.HashSet[Name](4096) val seenClasses = new util.HashSet[Symbol](256) def addDecls(cls: Symbol): Unit = - if (!seenClasses.contains(cls)) { + if (!seenClasses.contains(cls)) seenClasses += cls for (mbr <- cls.info.decls) if (mbr.isTerm && !mbr.isOneOf(Synthetic | Bridge) && mbr.memberCanMatchInheritedSymbols && @@ -782,14 +750,13 @@ object RefChecks { .filter(_.isOneOf(AbstractOrTrait)) .dropWhile(_.isOneOf(JavaDefined | Scala2x)) .foreach(addDecls) - } addDecls(clazz) // For each member, check that the type of its symbol, as seen from `self` // can override the info of this member - withMode(Mode.IgnoreCaptures) { + withMode(Mode.IgnoreCaptures): for (name <- membersToCheck) - for (mbrd <- self.member(name).alternatives) { + for (mbrd <- self.member(name).alternatives) val mbr = mbrd.symbol val mbrType = mbr.info.asSeenFrom(self, mbr.owner) if (!mbrType.overrides(mbrd.info, relaxedCheck = false, matchLoosely = true)) @@ -798,9 +765,6 @@ object RefChecks { | its type $mbrType | does not conform to ${mbrd.info}""", (if (mbr.owner == clazz) mbr else clazz).srcPos, from = `3.0`) - } - } - } /** Check that inheriting a case class does not constitute a variant refinement * of a base type of the case class. It is because of this restriction that we @@ -821,9 +785,8 @@ object RefChecks { checkMemberTypesOK() checkCaseClassInheritanceInvariant() - } - if (!clazz.is(Trait)) { + if (!clazz.is(Trait)) // check that parameterized base classes and traits are typed in the same way as from the superclass // I.e. say we have // @@ -835,7 +798,7 @@ object RefChecks { // // This is necessary because parameter values are determined directly or indirectly // by `Super`. 
So we cannot pretend they have a different type when seen from `Sub`. - def checkParameterizedTraitsOK() = { + def checkParameterizedTraitsOK() = val mixins = clazz.mixins for { cls <- clazz.info.baseClasses.tail @@ -843,10 +806,8 @@ object RefChecks { problem <- variantInheritanceProblems(cls, clazz.asClass.superClass, "parameterized", "super") } report.error(problem, clazz.srcPos) - } checkParameterizedTraitsOK() - } /** Check that `site` does not inherit conflicting generic instances of `baseCls`, * when doing a direct base type or going via intermediate class `middle`. I.e, we require: @@ -856,7 +817,7 @@ object RefChecks { * Return an optional by name error message if this test fails. */ def variantInheritanceProblems( - baseCls: Symbol, middle: Symbol, baseStr: String, middleStr: String): Option[Message] = { + baseCls: Symbol, middle: Symbol, baseStr: String, middleStr: String): Option[Message] = val superBT = self.baseType(middle) val thisBT = self.baseType(baseCls) val combinedBT = superBT.baseType(baseCls) @@ -867,18 +828,16 @@ object RefChecks { | | Direct basetype: $thisBT | Basetype via $middleStr$middle: $combinedBT""") - } /* Returns whether there is a symbol declared in class `inclazz` * (which must be different from `clazz`) whose name and type * seen as a member of `class.thisType` matches `member`'s. */ - def hasMatchingSym(inclazz: Symbol, member: Symbol): Boolean = { + def hasMatchingSym(inclazz: Symbol, member: Symbol): Boolean = - def isSignatureMatch(sym: Symbol) = sym.isType || { + def isSignatureMatch(sym: Symbol) = sym.isType `||`: val self = clazz.thisType sym.asSeenFrom(self).matches(member.asSeenFrom(self)) - } /* The rules for accessing members which have an access boundary are more * restrictive in java than scala. Since java has no concept of package nesting, @@ -907,20 +866,18 @@ object RefChecks { (inclazz != clazz) && classDecls.hasAltWith(d => isSignatureMatch(d.symbol) && javaAccessCheck(d.symbol)) - } // 4. 
Check that every defined member with an `override` modifier overrides some other member. for (member <- clazz.info.decls) - if (member.isAnyOverride && !(clazz.thisType.baseClasses exists (hasMatchingSym(_, member)))) { + if (member.isAnyOverride && !(clazz.thisType.baseClasses exists (hasMatchingSym(_, member)))) if (checks != noPrinter) - for (bc <- clazz.info.baseClasses.tail) { + for (bc <- clazz.info.baseClasses.tail) val sym = bc.info.decl(member.name).symbol if (sym.exists) checks.println(i"$bc has $sym: ${clazz.thisType.memberInfo(sym)}") - } val nonMatching = clazz.info.member(member.name).altsWith(alt => alt.owner != clazz) - nonMatching match { + nonMatching match case Nil => report.error(OverridesNothing(member), member.srcPos) case ms => @@ -932,11 +889,8 @@ object RefChecks { // an error. if (!(member.name == nme.getClass_ && clazz.isPrimitiveValueClass)) report.error(OverridesNothingButNameExists(member, ms), member.srcPos) - } member.resetFlag(Override) member.resetFlag(AbsOverride) - } - } /** Check that we do not "override" anything with a private method * or something that becomes a private method. 
According to the Scala @@ -1030,11 +984,10 @@ object RefChecks { do checkReferences(param.denot) private object PositionedStringLiteralArgument: - def unapply(tree: Tree): Option[(String, Span)] = tree match { + def unapply(tree: Tree): Option[(String, Span)] = tree match case l@Literal(Constant(s: String)) => Some((s, l.span)) case NamedArg(_, l@Literal(Constant(s: String))) => Some((s, l.span)) case _ => None - } private def checkReferences(sd: SymDenotation)(using Context): Unit = lazy val substitutableTypesNames = @@ -1096,7 +1049,6 @@ object RefChecks { && (!ctx.owner.enclosingClass.exists || ctx.owner.enclosingClass.isPackageObject) then report.warning(UnqualifiedCallToAnyRefMethod(tree, tree.symbol), tree) -} import RefChecks._ /** Post-attribution checking and transformation, which fulfills the following roles @@ -1140,27 +1092,23 @@ class RefChecks extends MiniPhase { thisPhase => override def runsAfter: Set[String] = Set(ElimRepeated.name) // Needs to run after ElimRepeated for override checks involving varargs methods - override def transformValDef(tree: ValDef)(using Context): ValDef = { + override def transformValDef(tree: ValDef)(using Context): ValDef = if tree.symbol.exists then checkNoPrivateOverrides(tree) val sym = tree.symbol - if (sym.exists && sym.owner.isTerm) { - tree.rhs match { + if (sym.exists && sym.owner.isTerm) + tree.rhs match case Ident(nme.WILDCARD) => report.error(UnboundPlaceholderParameter(), sym.srcPos) case _ => - } - } tree - } - override def transformDefDef(tree: DefDef)(using Context): DefDef = { + override def transformDefDef(tree: DefDef)(using Context): DefDef = checkNoPrivateOverrides(tree) checkImplicitNotFoundAnnotation.defDef(tree.symbol.denot) checkUnaryMethods(tree.symbol) tree - } - override def transformTemplate(tree: Template)(using Context): Tree = try { + override def transformTemplate(tree: Template)(using Context): Tree = try val cls = ctx.owner.asClass checkOverloadedRestrictions(cls) checkParents(cls, 
tree.parents) @@ -1169,11 +1117,10 @@ class RefChecks extends MiniPhase { thisPhase => checkAllOverrides(cls) checkImplicitNotFoundAnnotation.template(cls.classDenot) tree - } catch { + catch case ex: TypeError => report.error(ex, tree.srcPos) tree - } override def transformIdent(tree: Ident)(using Context): Tree = checkAnyRefMethodCall(tree) diff --git a/compiler/src/dotty/tools/dotc/typer/Synthesizer.scala b/compiler/src/dotty/tools/dotc/typer/Synthesizer.scala index 103961b68c29..c10231546ccf 100644 --- a/compiler/src/dotty/tools/dotc/typer/Synthesizer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Synthesizer.scala @@ -65,7 +65,7 @@ class Synthesizer(typer: Typer)(using @constructorOnly c: Context): end synthesizedClassTag val synthesizedTypeTest: SpecialHandler = - (formal, span) => formal.argInfos match { + (formal, span) => formal.argInfos match case arg1 :: arg2 :: Nil if !defn.isBottomClass(arg2.typeSymbol) => val srcPos = ctx.source.atSpan(span) val tp1 = fullyDefinedType(arg1, "TypeTest argument", srcPos) @@ -78,7 +78,7 @@ class Synthesizer(typer: Typer)(using @constructorOnly c: Context): EmptyTreeNoError else // Generate SAM: (s: ) => if s.isInstanceOf[] then Some(s.asInstanceOf[s.type & ]) else None - def body(args: List[Tree]): Tree = { + def body(args: List[Tree]): Tree = val arg :: Nil = args: @unchecked val t = arg.tpe & tp2 If( @@ -87,14 +87,12 @@ class Synthesizer(typer: Typer)(using @constructorOnly c: Context): .appliedToType(t) .appliedTo(arg.select(nme.asInstanceOf_).appliedToType(t)), ref(defn.NoneModule)) - } val tpe = MethodType(List(nme.s))(_ => List(tp1), mth => defn.OptionClass.typeRef.appliedTo(mth.newParamRef(0) & tp2)) val meth = newAnonFun(ctx.owner, tpe, coord = span) val typeTestType = defn.TypeTestClass.typeRef.appliedTo(List(tp1, tp2)) withNoErrors(Closure(meth, tss => body(tss.head).changeOwner(ctx.owner, meth), targetType = typeTestType).withSpan(span)) case _ => EmptyTreeNoError - } end synthesizedTypeTest val 
synthesizedTupleFunction: SpecialHandler = (formal, span) => @@ -157,9 +155,8 @@ class Synthesizer(typer: Typer)(using @constructorOnly c: Context): */ def validEqAnyArgs(tp1: Type, tp2: Type)(using Context) = typer.assumedCanEqual(tp1, tp2) - || withMode(Mode.StrictEquality) { + || withMode(Mode.StrictEquality): !hasEq(tp1) && !hasEq(tp2) - } /** Is an `CanEqual[cls1, cls2]` instance assumed for predefined classes `cls1`, cls2`? */ def canComparePredefinedClasses(cls1: ClassSymbol, cls2: ClassSymbol): Boolean = @@ -385,11 +382,10 @@ class Synthesizer(typer: Typer)(using @constructorOnly c: Context): for lsrc <- reduce(l) rsrc <- reduce(r) - res <- locally { + res <- locally: if lsrc.isSub(rsrc) then Right(lsrc) else if rsrc.isSub(lsrc) then Right(rsrc) else Left(i"its subpart `$tp` is an intersection of unrelated definitions ${lsrc.show} and ${rsrc.show}.") - } yield res case tp: OrType => @@ -420,11 +416,10 @@ class Synthesizer(typer: Typer)(using @constructorOnly c: Context): val elemsLabels = TypeOps.nestedPairs(elemLabels) checkRefinement(formal, tpnme.MirroredElemTypes, elemsType, span) checkRefinement(formal, tpnme.MirroredElemLabels, elemsLabels, span) - val mirrorType = formal.constrained_& { + val mirrorType = formal.`constrained_&`: mirrorCore(defn.Mirror_ProductClass, monoType, mirroredType, cls.name) .refinedWith(tpnme.MirroredElemTypes, TypeAlias(elemsType)) .refinedWith(tpnme.MirroredElemLabels, TypeAlias(elemsLabels)) - } val mirrorRef = if cls.useCompanionAsProductMirror then companionPath(mirroredType, span) else if defn.isTupleClass(cls) then newTupleMirror(typeElems.size) // TODO: cls == defn.PairClass when > 22 @@ -434,32 +429,30 @@ class Synthesizer(typer: Typer)(using @constructorOnly c: Context): MirrorSource.reduce(mirroredType) match case Right(msrc) => msrc match - case MirrorSource.Singleton(_, tref) => - val singleton = tref.termSymbol // prefer alias name over the orignal name - val singletonPath = pathFor(tref).withSpan(span) - if 
tref.classSymbol.is(Scala2x) then // could be Scala 3 alias of Scala 2 case object. - val mirrorType = formal.constrained_& { - mirrorCore(defn.Mirror_SingletonProxyClass, mirroredType, mirroredType, singleton.name) - } - val mirrorRef = New(defn.Mirror_SingletonProxyClass.typeRef, singletonPath :: Nil) - withNoErrors(mirrorRef.cast(mirrorType)) - else - val mirrorType = formal.constrained_& { - mirrorCore(defn.Mirror_SingletonClass, mirroredType, mirroredType, singleton.name) - } - withNoErrors(singletonPath.cast(mirrorType)) - case MirrorSource.GenericTuple(tps) => - val maxArity = Definitions.MaxTupleArity - val arity = tps.size - if tps.size <= maxArity then - val tupleCls = defn.TupleType(arity).nn.classSymbol - makeProductMirror(tupleCls.owner.reachableThisType, tupleCls, Some(tps)) - else - val reason = s"it reduces to a tuple with arity $arity, expected arity <= $maxArity" - withErrors(i"${defn.PairClass} is not a generic product because $reason") - case MirrorSource.ClassSymbol(pre, cls) => - if cls.isGenericProduct then makeProductMirror(pre, cls, None) - else withErrors(i"$cls is not a generic product because ${cls.whyNotGenericProduct}") + case MirrorSource.Singleton(_, tref) => + val singleton = tref.termSymbol // prefer alias name over the orignal name + val singletonPath = pathFor(tref).withSpan(span) + if tref.classSymbol.is(Scala2x) then // could be Scala 3 alias of Scala 2 case object. 
+ val mirrorType = formal.`constrained_&`: + mirrorCore(defn.Mirror_SingletonProxyClass, mirroredType, mirroredType, singleton.name) + val mirrorRef = New(defn.Mirror_SingletonProxyClass.typeRef, singletonPath :: Nil) + withNoErrors(mirrorRef.cast(mirrorType)) + else + val mirrorType = formal.`constrained_&`: + mirrorCore(defn.Mirror_SingletonClass, mirroredType, mirroredType, singleton.name) + withNoErrors(singletonPath.cast(mirrorType)) + case MirrorSource.GenericTuple(tps) => + val maxArity = Definitions.MaxTupleArity + val arity = tps.size + if tps.size <= maxArity then + val tupleCls = defn.TupleType(arity).nn.classSymbol + makeProductMirror(tupleCls.owner.reachableThisType, tupleCls, Some(tps)) + else + val reason = s"it reduces to a tuple with arity $arity, expected arity <= $maxArity" + withErrors(i"${defn.PairClass} is not a generic product because $reason") + case MirrorSource.ClassSymbol(pre, cls) => + if cls.isGenericProduct then makeProductMirror(pre, cls, None) + else withErrors(i"$cls is not a generic product because ${cls.whyNotGenericProduct}") case Left(msg) => withErrors(i"type `$mirroredType` is not a generic product because $msg") end productMirror @@ -549,11 +542,10 @@ class Synthesizer(typer: Typer)(using @constructorOnly c: Context): val mirrorType = val labels = TypeOps.nestedPairs(elemLabels) - formal.constrained_& { + formal.`constrained_&`: mirrorCore(defn.Mirror_SumClass, monoType, mirroredType, cls.name) .refinedWith(tpnme.MirroredElemTypes, TypeAlias(elemsType)) .refinedWith(tpnme.MirroredElemLabels, TypeAlias(labels)) - } val mirrorRef = if cls.useCompanionAsSumMirror then companionPath(mirroredType, span) else anonymousMirror(monoType, MirrorImpl.OfSum(childPres), span) @@ -562,7 +554,7 @@ class Synthesizer(typer: Typer)(using @constructorOnly c: Context): withErrors(i"type `$mirroredType` is not a generic sum because $acceptableMsg") else if !clsIsGenericSum then withErrors(i"$cls is not a generic sum because 
${cls.whyNotGenericSum(pre)}") - else + else EmptyTreeNoError end sumMirror diff --git a/compiler/src/dotty/tools/dotc/typer/TypeAssigner.scala b/compiler/src/dotty/tools/dotc/typer/TypeAssigner.scala index 6ac45cbcf04d..e6804d3713f9 100644 --- a/compiler/src/dotty/tools/dotc/typer/TypeAssigner.scala +++ b/compiler/src/dotty/tools/dotc/typer/TypeAssigner.scala @@ -13,20 +13,20 @@ import reporting._ import Checking.{checkNoPrivateLeaks, checkNoWildcard} import cc.CaptureSet -trait TypeAssigner { +trait TypeAssigner: import tpd.* import TypeAssigner.* /** The qualifying class of a this or super with prefix `qual` (which might be empty). * @param packageOK The qualifier may refer to a package. */ - def qualifyingClass(tree: untpd.Tree, qual: Name, packageOK: Boolean)(using Context): Symbol = { + def qualifyingClass(tree: untpd.Tree, qual: Name, packageOK: Boolean)(using Context): Symbol = def qualifies(sym: Symbol) = sym.isClass && ( qual.isEmpty || sym.name == qual || sym.is(Module) && sym.name.stripModuleClassSuffix == qual) - ctx.outersIterator.map(_.owner).find(qualifies) match { + ctx.outersIterator.map(_.owner).find(qualifies) match case Some(c) if packageOK || !c.is(Package) => c case _ => @@ -35,8 +35,6 @@ trait TypeAssigner { else em"$qual is not an enclosing class", tree.srcPos) NoSymbol - } - } def avoidingType(expr: Tree, bindings: List[Tree])(using Context): Type = TypeOps.avoid(expr.tpe, localSyms(bindings).filterConserve(_.isTerm)) @@ -55,7 +53,7 @@ trait TypeAssigner { /** A denotation exists really if it exists and does not point to a stale symbol. 
*/ final def reallyExists(denot: Denotation)(using Context): Boolean = try - denot match { + denot match case denot: SymDenotation => denot.exists && !denot.isAbsent() case denot: SingleDenotation => @@ -63,10 +61,8 @@ trait TypeAssigner { (sym eq NoSymbol) || reallyExists(sym.denot) case _ => true - } - catch { + catch case ex: StaleSymbol => false - } /** If `tpe` is a named type, return the type with those alternatives as denotations * which are accessible (or NoType, if no alternatives are accessible). @@ -204,52 +200,46 @@ trait TypeAssigner { /** Normalize type T appearing in a new T by following eta expansions to * avoid higher-kinded types. */ - def typeOfNew(tpt: Tree)(using Context): Type = tpt.tpe.dealias match { + def typeOfNew(tpt: Tree)(using Context): Type = tpt.tpe.dealias match case TypeApplications.EtaExpansion(tycon) => tycon case t => tpt.tpe - } def assignType(tree: untpd.New, tpt: Tree)(using Context): New = tree.withType(typeOfNew(tpt)) def assignType(tree: untpd.Literal)(using Context): Literal = - tree.withType { + tree.withType: val value = tree.const - value.tag match { + value.tag match case UnitTag => defn.UnitType case NullTag => defn.NullType case _ => if (ctx.erasedTypes) value.tpe else ConstantType(value) - } - } - def assignType(tree: untpd.This)(using Context): This = { + def assignType(tree: untpd.This)(using Context): This = val cls = qualifyingClass(tree, tree.qual.name, packageOK = false) tree.withType( if (cls.isClass) cls.thisType else errorType(em"not a legal qualifying class for this", tree.srcPos)) - } def superType(qualType: Type, mix: untpd.Ident, mixinClass: Symbol, pos: SrcPos)(using Context) = qualType match case err: ErrorType => err case qtype @ ThisType(_) => val cls = qtype.cls - def findMixinSuper(site: Type): Type = site.parents filter (_.typeSymbol.name == mix.name) match { + def findMixinSuper(site: Type): Type = site.parents filter (_.typeSymbol.name == mix.name) match case p :: Nil => p.typeConstructor case 
Nil => errorType(SuperQualMustBeParent(mix, cls), pos) case p :: q :: _ => errorType(em"ambiguous parent class qualifier", pos) - } val owntype = if (mixinClass.exists) mixinClass.typeRef else if (!mix.isEmpty) findMixinSuper(cls.info) else if (ctx.erasedTypes) cls.info.firstParent.typeConstructor - else { + else val ps = cls.classInfo.parents if (ps.isEmpty) defn.AnyType else ps.reduceLeft((x: Type, y: Type) => x & y) - } SuperType(cls.thisType, owntype) def assignType(tree: untpd.Super, qual: Tree, mixinClass: Symbol = NoSymbol)(using Context): Super = @@ -259,30 +249,28 @@ trait TypeAssigner { /** Substitute argument type `argType` for parameter `pref` in type `tp`, * skolemizing the argument type if it is not stable and `pref` occurs in `tp`. */ - def safeSubstParam(tp: Type, pref: ParamRef, argType: Type)(using Context): Type = { + def safeSubstParam(tp: Type, pref: ParamRef, argType: Type)(using Context): Type = val tp1 = tp.substParam(pref, argType) if ((tp1 eq tp) || argType.isStable) tp1 else tp.substParam(pref, SkolemType(argType.widen)) - } /** Substitute types of all arguments `args` for corresponding `params` in `tp`. * The number of parameters `params` may exceed the number of arguments. * In this case, only the common prefix is substituted. 
*/ - def safeSubstParams(tp: Type, params: List[ParamRef], argTypes: List[Type])(using Context): Type = argTypes match { + def safeSubstParams(tp: Type, params: List[ParamRef], argTypes: List[Type])(using Context): Type = argTypes match case argType :: argTypes1 => val tp1 = safeSubstParam(tp, params.head, argType) safeSubstParams(tp1, params.tail, argTypes1) case Nil => tp - } def safeSubstMethodParams(mt: MethodType, argTypes: List[Type])(using Context): Type = if mt.isResultDependent then safeSubstParams(mt.resultType, mt.paramRefs, argTypes) else mt.resultType - def assignType(tree: untpd.Apply, fn: Tree, args: List[Tree])(using Context): Apply = { - val ownType = fn.tpe.widen match { + def assignType(tree: untpd.Apply, fn: Tree, args: List[Tree])(using Context): Apply = + val ownType = fn.tpe.widen match case fntpe: MethodType => if fntpe.paramInfos.hasSameLengthAs(args) || ctx.phase.prev.relaxedTyping then if fntpe.isResultDependent then safeSubstMethodParams(fntpe, args.tpes) @@ -292,17 +280,15 @@ trait TypeAssigner { case t => if (ctx.settings.Ydebug.value) new FatalError("").printStackTrace() errorType(err.takesNoParamsMsg(fn, ""), tree.srcPos) - } ConstFold.Apply(tree.withType(ownType)) - } - def assignType(tree: untpd.TypeApply, fn: Tree, args: List[Tree])(using Context): TypeApply = { + def assignType(tree: untpd.TypeApply, fn: Tree, args: List[Tree])(using Context): TypeApply = def fail = tree.withType(errorType(err.takesNoParamsMsg(fn, "type "), tree.srcPos)) ConstFold(fn.tpe.widen match { case pt: TypeLambda => - tree.withType { + tree.withType: val paramNames = pt.paramNames - if (hasNamedArg(args)) { + if (hasNamedArg(args)) val paramBoundsByName = paramNames.zip(pt.paramInfos).toMap // Type arguments which are specified by name (immutable after this first loop) @@ -317,35 +303,30 @@ trait TypeAssigner { // Holds indexes of non-named typed arguments in paramNames val gapBuf = new mutable.ListBuffer[Int] - def nextPoly(idx: Int) = { + def 
nextPoly(idx: Int) = val newIndex = gapBuf.length gapBuf += idx // Re-index unassigned type arguments that remain after transformation pt.paramRefs(newIndex) - } // Type parameters after naming assignment, conserving paramNames order val normArgs: List[Type] = paramNames.zipWithIndex.map { case (pname, idx) => namedArgMap.getOrElse(pname, nextPoly(idx)) } - val transform = new TypeMap { - def apply(t: Type) = t match { + val transform = new TypeMap: + def apply(t: Type) = t match case TypeParamRef(`pt`, idx) => normArgs(idx) case _ => mapOver(t) - } - } val resultType1 = transform(pt.resultType) if (gapBuf.isEmpty) resultType1 - else { + else val gaps = gapBuf.toList pt.derivedLambdaType( gaps.map(paramNames), gaps.map(idx => transform(pt.paramInfos(idx)).bounds), resultType1) - } - } - else { + else // Make sure arguments don't contain the type `pt` itself. // make a copy of the argument if that's the case. // This is done to compensate for the fact that normally every @@ -359,23 +340,19 @@ trait TypeAssigner { val argTypes = args.tpes.mapConserve(ensureFresh) if (argTypes.hasSameLengthAs(paramNames)) pt.instantiate(argTypes) else wrongNumberOfTypeArgs(fn.tpe, pt.typeParams, args, tree.srcPos) - } - } case err: ErrorType => tree.withType(err) case ref: TermRef if ref.isOverloaded => val disambiguated = ref.denot.suchThat(_.info.isInstanceOf[PolyType]) - if (disambiguated.exists) { + if (disambiguated.exists) val fn1 = fn.withType(ref.withDenot(disambiguated)) val tree1 = untpd.cpy.TypeApply(tree)(fn1, args) assignType(tree1, fn1, args) - } else fail case _ => //println(i"bad type: $fn: ${fn.symbol} / ${fn.symbol.isType} / ${fn.symbol.info}") // DEBUG fail }) - } def assignType(tree: untpd.Typed, tpt: Tree)(using Context): Typed = tree.withType(tpt.tpe) @@ -400,15 +377,13 @@ trait TypeAssigner { if (target.isEmpty) meth.tpe.widen.toFunctionType(isJava = meth.symbol.is(JavaDefined), tree.env.length) else target.tpe) - def assignType(tree: untpd.CaseDef, pat: Tree, 
body: Tree)(using Context): CaseDef = { + def assignType(tree: untpd.CaseDef, pat: Tree, body: Tree)(using Context): CaseDef = val ownType = - if (body.isType) { - val getParams = new TreeAccumulator[mutable.ListBuffer[TypeSymbol]] { - def apply(ps: mutable.ListBuffer[TypeSymbol], t: Tree)(using Context) = t match { + if (body.isType) + val getParams = new TreeAccumulator[mutable.ListBuffer[TypeSymbol]]: + def apply(ps: mutable.ListBuffer[TypeSymbol], t: Tree)(using Context) = t match case t: Bind if t.symbol.isType => foldOver(ps += t.symbol.asType, t) case _ => foldOver(ps, t) - } - } val params1 = getParams(new mutable.ListBuffer[TypeSymbol](), pat).toList val params2 = pat.tpe match case AppliedType(tycon, args) => @@ -422,10 +397,8 @@ trait TypeAssigner { val matchCase1 = defn.MatchCase(pat.tpe, body.tpe) val matchCase2 = if params2 eq params1 then matchCase1 else matchCase1.substSym(params1, params2) HKTypeLambda.fromParams(params2, matchCase2) - } else body.tpe tree.withType(ownType) - } def assignType(tree: untpd.Match, scrutinee: Tree, cases: List[CaseDef])(using Context): Match = tree.withType(TypeComparer.lub(cases.tpes)) @@ -452,19 +425,17 @@ trait TypeAssigner { /** Assign type of RefinedType. * Refinements are typed as if they were members of refinement class `refineCls`. 
*/ - def assignType(tree: untpd.RefinedTypeTree, parent: Tree, refinements: List[Tree], refineCls: ClassSymbol)(using Context): RefinedTypeTree = { - def addRefinement(parent: Type, refinement: Tree): Type = { + def assignType(tree: untpd.RefinedTypeTree, parent: Tree, refinements: List[Tree], refineCls: ClassSymbol)(using Context): RefinedTypeTree = + def addRefinement(parent: Type, refinement: Tree): Type = val rsym = refinement.symbol val rinfo = if (rsym.is(Accessor)) rsym.info.resultType else rsym.info if (rinfo.isError) rinfo else if (!rinfo.exists) parent // can happen after failure in self type definition else RefinedType(parent, rsym.name, rinfo) - } val refined = refinements.foldLeft(parent.tpe)(addRefinement) tree.withType(RecType.closeOver(rt => refined.substThis(refineCls, rt.recThis))) - } - def assignType(tree: untpd.AppliedTypeTree, tycon: Tree, args: List[Tree])(using Context): AppliedTypeTree = { + def assignType(tree: untpd.AppliedTypeTree, tycon: Tree, args: List[Tree])(using Context): AppliedTypeTree = assert(!hasNamedArg(args) || ctx.reporter.errorsReported, tree) val tparams = tycon.tpe.typeParams val ownType = @@ -473,7 +444,6 @@ trait TypeAssigner { else wrongNumberOfTypeArgs(tycon.tpe, tparams, args, tree.srcPos) tree.withType(ownType) - } def assignType(tree: untpd.LambdaTypeTree, tparamDefs: List[TypeDef], body: Tree)(using Context): LambdaTypeTree = val validParams = tparamDefs.filterConserve { tdef => @@ -483,10 +453,9 @@ trait TypeAssigner { } tree.withType(HKTypeLambda.fromParams(validParams.map(_.symbol.asType), body.tpe)) - def assignType(tree: untpd.MatchTypeTree, bound: Tree, scrutinee: Tree, cases: List[CaseDef])(using Context): MatchTypeTree = { + def assignType(tree: untpd.MatchTypeTree, bound: Tree, scrutinee: Tree, cases: List[CaseDef])(using Context): MatchTypeTree = val boundType = if (bound.isEmpty) defn.AnyType else bound.tpe tree.withType(MatchType(boundType, scrutinee.tpe, cases.tpes)) - } def assignType(tree: 
untpd.ByNameTypeTree, result: Tree)(using Context): ByNameTypeTree = tree.withType(ExprType(result.tpe)) @@ -497,7 +466,7 @@ trait TypeAssigner { else if lo eq hi then if lo.tpe.isMatch then MatchAlias(lo.tpe) else TypeAlias(lo.tpe) - else TypeBounds(lo.tpe, hi.tpe)) + else TypeBounds(lo.tpe, hi.tpe)) def assignType(tree: untpd.Bind, sym: Symbol)(using Context): Bind = tree.withType(NamedType(NoPrefix, sym)) @@ -525,10 +494,9 @@ trait TypeAssigner { def assignType(tree: untpd.Export)(using Context): Export = tree.withType(defn.UnitType) - def assignType(tree: untpd.Annotated, arg: Tree, annot: Tree)(using Context): Annotated = { + def assignType(tree: untpd.Annotated, arg: Tree, annot: Tree)(using Context): Annotated = assert(tree.isType) // annotating a term is done via a Typed node, can't use Annotate directly tree.withType(AnnotatedType(arg.tpe, Annotation(annot))) - } def assignType(tree: untpd.PackageDef, pid: Tree)(using Context): PackageDef = tree.withType(pid.symbol.termRef) @@ -536,7 +504,6 @@ trait TypeAssigner { def assignType(tree: untpd.Hole, tpt: Tree)(using Context): Hole = tree.withType(tpt.tpe) -} object TypeAssigner extends TypeAssigner: def seqLitType(tree: untpd.SeqLiteral, elemType: Type)(using Context) = tree match diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index 2e7444af8e96..d9d678b247c0 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -55,16 +55,15 @@ import config.Config import scala.annotation.constructorOnly -object Typer { +object Typer: /** The precedence of bindings which determines which of several bindings will be * accessed by an Ident. 
*/ - enum BindingPrec { + enum BindingPrec: case NothingBound, PackageClause, WildImport, NamedImport, Inheritance, Definition def isImportPrec = this == NamedImport || this == WildImport - } /** Assert tree has a position, unless it is empty or a typed splice */ def assertPositioned(tree: untpd.Tree)(using Context): Unit = @@ -98,17 +97,15 @@ object Typer { /** Is tree a compiler-generated `.apply` node that refers to the * apply of a function class? */ - private[typer] def isSyntheticApply(tree: tpd.Tree): Boolean = tree match { + private[typer] def isSyntheticApply(tree: tpd.Tree): Boolean = tree match case tree: tpd.Select => tree.hasAttachment(InsertedApply) case TypeApply(fn, targs) => isSyntheticApply(fn) && targs.forall(_.isInstanceOf[tpd.InferredTypeTree]) case _ => false - } /** Add `fail` to the list of search failures attached to `tree` */ def rememberSearchFailure(tree: tpd.Tree, fail: SearchFailure) = tree.putAttachment(HiddenSearchFailure, fail :: tree.attachmentOrElse(HiddenSearchFailure, Nil)) -} /** Typecheck trees, the main entry point is `typed`. * * @param nestingLevel The nesting level of the `scope` of this Typer. @@ -122,7 +119,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer with Dynamic with Checking with QuotesAndSplices - with Deriving { + with Deriving: import Typer._ import tpd.{cpy => _, _} @@ -165,7 +162,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer * a reference for `m` is searched. `null` in all other situations. 
*/ def findRef(name: Name, pt: Type, required: FlagSet, excluded: FlagSet, pos: SrcPos, - altImports: mutable.ListBuffer[TermRef] | Null = null)(using Context): Type = { + altImports: mutable.ListBuffer[TermRef] | Null = null)(using Context): Type = val refctx = ctx val noImports = ctx.mode.is(Mode.InPackageClauseName) def suppressErrors = excluded.is(ConstructorProxy) @@ -210,7 +207,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer * @param prevCtx The context of the previous denotation, * or else `NoContext` if nothing was found yet. */ - def findRefRecur(previous: Type, prevPrec: BindingPrec, prevCtx: Context)(using Context): Type = { + def findRefRecur(previous: Type, prevPrec: BindingPrec, prevCtx: Context)(using Context): Type = import BindingPrec._ /** Check that any previously found result from an inner context @@ -315,7 +312,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer /** The type representing a named import with enclosing name when imported * from given `site` and `selectors`. */ - def namedImportRef(imp: ImportInfo)(using Context): Type = { + def namedImportRef(imp: ImportInfo)(using Context): Type = val termName = name.toTermName def recur(selectors: List[untpd.ImportSelector]): Type = selectors match case selector :: rest => @@ -338,7 +335,6 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer NoType recur(imp.selectors) - } /** The type representing a wildcard import with enclosing name when imported * from given import info @@ -351,16 +347,14 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer /** Is (some alternative of) the given predenotation `denot` * defined in current compilation unit? 
*/ - def isDefinedInCurrentUnit(denot: Denotation)(using Context): Boolean = denot match { + def isDefinedInCurrentUnit(denot: Denotation)(using Context): Boolean = denot match case MultiDenotation(d1, d2) => isDefinedInCurrentUnit(d1) || isDefinedInCurrentUnit(d2) case denot: SingleDenotation => (ctx.compilationUnit ne NoCompilationUnit) && denot.symbol.source == ctx.compilationUnit.source - } /** Is `denot` the denotation of a self symbol? */ - def isSelfDenot(denot: Denotation)(using Context) = denot match { + def isSelfDenot(denot: Denotation)(using Context) = denot match case denot: SymDenotation => denot.is(SelfName) case _ => false - } /** Would import of kind `prec` be not shadowed by a nested higher-precedence definition? */ def isPossibleImport(prec: BindingPrec)(using Context) = @@ -369,7 +363,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer @tailrec def loop(lastCtx: Context)(using Context): Type = if (ctx.scope eq EmptyScope) previous - else { + else var result: Type = NoType val curOwner = ctx.owner @@ -420,7 +414,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer */ def checkNoOuterDefs(denot: Denotation, last: Context, prevCtx: Context): Unit = def sameTermOrType(d1: SingleDenotation, d2: Denotation) = - d2.containsSym(d1.symbol) || d2.hasUniqueSym && { + d2.containsSym(d1.symbol) || d2.hasUniqueSym `&&`: val sym1 = d1.symbol val sym2 = d2.symbol if sym1.isTerm then @@ -429,7 +423,6 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer sym1.termRef =:= sym2.termRef else (sym1.isAliasType || sym2.isAliasType) && d1.info =:= d2.info - } val outer = last.outer val owner = outer.owner if (owner eq last.owner) && (outer.scope eq last.scope) then @@ -460,12 +453,12 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer if isNewDefScope then val defDenot = ctx.denotNamed(name, required, excluded) - if (qualifies(defDenot)) { + if (qualifies(defDenot)) val found = if 
(isSelfDenot(defDenot)) curOwner.enclosingClass.thisType - else if (ctx.isJava && defDenot.symbol.isStatic) { + else if (ctx.isJava && defDenot.symbol.isStatic) defDenot.symbol.namedType - } else { + else val effectiveOwner = if (curOwner.isTerm && defDenot.symbol.maybeOwner.isType) // Don't mix NoPrefix and thisType prefixes, since type comparer @@ -474,7 +467,6 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer else curOwner effectiveOwner.thisType.select(name, defDenot) - } if !curOwner.is(Package) || isDefinedInCurrentUnit(defDenot) then result = checkNewOrShadowed(found, Definition) // no need to go further out, we found highest prec entry found match @@ -489,44 +481,35 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer result = checkNewOrShadowed(previous orElse found, PackageClause) else if (prevPrec.ordinal < PackageClause.ordinal) result = findRefRecur(found, PackageClause, ctx)(using ctx.outer) - } if result.exists then result - else { // find import + else // find import val outer = ctx.outer val curImport = ctx.importInfo def updateUnimported() = if (curImport.nn.unimported ne NoSymbol) unimported += curImport.nn.unimported if (curOwner.is(Package) && curImport != null && curImport.isRootImport && previous.exists) previous // no more conflicts possible in this case - else if (isPossibleImport(NamedImport) && (curImport nen outer.importInfo)) { + else if (isPossibleImport(NamedImport) && (curImport nen outer.importInfo)) val namedImp = namedImportRef(curImport.uncheckedNN) if (namedImp.exists) checkImportAlternatives(namedImp, NamedImport, ctx)(using outer) - else if (isPossibleImport(WildImport) && !curImport.nn.importSym.isCompleting) { + else if (isPossibleImport(WildImport) && !curImport.nn.importSym.isCompleting) val wildImp = wildImportRef(curImport.uncheckedNN) if (wildImp.exists) checkImportAlternatives(wildImp, WildImport, ctx)(using outer) - else { + else updateUnimported() loop(ctx)(using outer) - } - } - 
else { + else updateUnimported() loop(ctx)(using outer) - } - } else loop(ctx)(using outer) - } - } // begin findRefRecur loop(NoContext) - } findRefRecur(NoType, BindingPrec.NothingBound, NoContext) - } /** If `tree`'s type is a `TermRef` identified by flow typing to be non-null, then * cast away `tree`s nullability. Otherwise, `tree` remains unchanged. @@ -712,7 +695,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer else val tree1 = tryExtensionOrConversion( tree, pt, IgnoredProto(pt), qual, ctx.typerState.ownedVars, this, inSelect = true) - .orElse { + .orElse: if ctx.gadt.isNarrowing then // try GADT approximation if we're trying to select a member // Member lookup cannot take GADTs into account b/c of cache, so we @@ -734,7 +717,6 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer else tryExtensionOrConversion(tree1, pt, IgnoredProto(pt), qual1, ctx.typerState.ownedVars, this, inSelect = true) else EmptyTree - } if !tree1.isEmpty then tree1 else if canDefineFurther(qual.tpe.widen) then @@ -756,7 +738,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer notAMemberErrorType(tree, qual)) end typedSelect - def typedSelect(tree: untpd.Select, pt: Type)(using Context): Tree = { + def typedSelect(tree: untpd.Select, pt: Type)(using Context): Tree = record("typedSelect") def typeSelectOnTerm(using Context): Tree = @@ -771,7 +753,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer else assignType(cpy.Select(tree)(qual, tree.name), qual) - def tryJavaSelectOnType(using Context): Tree = tree.qualifier match { + def tryJavaSelectOnType(using Context): Tree = tree.qualifier match case sel @ Select(qual, name) => val qual1 = untpd.cpy.Select(sel)(qual, name.toTypeName) val qual2 = typedType(qual1, WildcardType) @@ -784,124 +766,105 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer case _ => errorTree(tree, em"cannot convert to type selection") // will never be printed due to 
fallback - } def selectWithFallback(fallBack: Context ?=> Tree) = tryAlternatively(typeSelectOnTerm)(fallBack) - if (tree.qualifier.isType) { + if (tree.qualifier.isType) val qual1 = typedType(tree.qualifier, shallowSelectionProto(tree.name, pt, this)) assignType(cpy.Select(tree)(qual1, tree.name), qual1) - } else if (ctx.isJava && tree.name.isTypeName) // SI-3120 Java uses the same syntax, A.B, to express selection from the // value A and from the type A. We have to try both. selectWithFallback(tryJavaSelectOnType) // !!! possibly exponential bcs of qualifier retyping else typeSelectOnTerm - } - def typedThis(tree: untpd.This)(using Context): Tree = { + def typedThis(tree: untpd.This)(using Context): Tree = record("typedThis") assignType(tree) - } - def typedSuper(tree: untpd.Super, pt: Type)(using Context): Tree = { + def typedSuper(tree: untpd.Super, pt: Type)(using Context): Tree = val qual1 = typed(tree.qual) val enclosingInlineable = ctx.owner.ownersIterator.findSymbol(_.isInlineMethod) if (enclosingInlineable.exists && !PrepareInlineable.isLocal(qual1.symbol, enclosingInlineable)) report.error(SuperCallsNotAllowedInlineable(enclosingInlineable), tree.srcPos) - pt match { + pt match case pt: SelectionProto if pt.name.isTypeName => qual1 // don't do super references for types; they are meaningless anyway case _ => assignType(cpy.Super(tree)(qual1, tree.mix), qual1) - } - } - def typedNumber(tree: untpd.Number, pt: Type)(using Context): Tree = { + def typedNumber(tree: untpd.Number, pt: Type)(using Context): Tree = import scala.util.FromDigits._ import untpd.NumberKind._ record("typedNumber") val digits = tree.digits val target = pt.dealias def lit(value: Any) = Literal(Constant(value)).withSpan(tree.span) - try { + try // Special case primitive numeric types if (target.isRef(defn.IntClass) || target.isRef(defn.CharClass) || target.isRef(defn.ByteClass) || target.isRef(defn.ShortClass)) - tree.kind match { + tree.kind match case Whole(radix) => return 
lit(intFromDigits(digits, radix)) case _ => - } else if (target.isRef(defn.LongClass)) - tree.kind match { + tree.kind match case Whole(radix) => return lit(longFromDigits(digits, radix)) case _ => - } else if (target.isRef(defn.FloatClass)) - tree.kind match { + tree.kind match case Whole(16) => // cant parse hex literal as float case _ => val float = floatFromDigits(digits) if digits.toIntOption.exists(_ != float.toInt) then report.warning(LossyWideningConstantConversion(defn.IntType, target), tree.srcPos) return lit(float) - } else if (target.isRef(defn.DoubleClass)) - tree.kind match { + tree.kind match case Whole(16) => // cant parse hex literal as double case _ => return lit(doubleFromDigits(digits)) - } else if Feature.genericNumberLiteralsEnabled && target.isValueType && isFullyDefined(target, ForceDegree.none) then // If expected type is defined with a FromDigits instance, use that one - val fromDigitsCls = tree.kind match { + val fromDigitsCls = tree.kind match case Whole(10) => defn.FromDigitsClass case Whole(_) => defn.FromDigits_WithRadixClass case Decimal => defn.FromDigits_DecimalClass case Floating => defn.FromDigits_FloatingClass - } - inferImplicit(fromDigitsCls.typeRef.appliedTo(target), EmptyTree, tree.span) match { + inferImplicit(fromDigitsCls.typeRef.appliedTo(target), EmptyTree, tree.span) match case SearchSuccess(arg, _, _, _) => val fromDigits = untpd.Select(untpd.TypedSplice(arg), nme.fromDigits).withSpan(tree.span) val firstArg = Literal(Constant(digits)) - val otherArgs = tree.kind match { + val otherArgs = tree.kind match case Whole(r) if r != 10 => Literal(Constant(r)) :: Nil case _ => Nil - } var app: untpd.Tree = untpd.Apply(fromDigits, firstArg :: otherArgs) if (ctx.mode.is(Mode.Pattern)) app = untpd.Block(Nil, app) return typed(app, pt) case _ => - } // Otherwise convert to Int or Double according to digits format - tree.kind match { + tree.kind match case Whole(radix) => lit(intFromDigits(digits, radix)) case _ => 
lit(doubleFromDigits(digits)) - } - } - catch { + catch case ex: FromDigitsException => report.error(ex.getMessage.nn, tree.srcPos) - tree.kind match { + tree.kind match case Whole(_) => lit(0) case _ => lit(0.0) - } - } - } - def typedLiteral(tree: untpd.Literal)(using Context): Tree = { + def typedLiteral(tree: untpd.Literal)(using Context): Tree = val tree1 = assignType(tree) if (ctx.mode.is(Mode.Type)) tpd.SingletonTypeTree(tree1) // this ensures that tree is classified as a type tree else tree1 - } def typedNew(tree: untpd.New, pt: Type)(using Context): Tree = - tree.tpt match { + tree.tpt match case templ: untpd.Template => import untpd._ var templ1 = templ @@ -925,17 +888,15 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer if tsym.is(Package) then report error(em"$tsym cannot be instantiated", tpt1.srcPos) tpt1 = tpt1.withType(ensureAccessible(tpt1.tpe, superAccess = false, tpt1.srcPos)) - tpt1 match { + tpt1 match case AppliedTypeTree(_, targs) => for case targ: TypeBoundsTree <- targs do report.error(WildcardOnTypeArgumentNotAllowedOnNew(), targ.srcPos) case _ => - } assignType(cpy.New(tree)(tpt1), tpt1) - } - def typedTyped(tree: untpd.Typed, pt: Type)(using Context): Tree = { + def typedTyped(tree: untpd.Typed, pt: Type)(using Context): Tree = /* Handles three cases: * @param ifPat how to handle a pattern (_: T) @@ -943,17 +904,15 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer * @param wildName what name `w` to use in the rewriting of * (x: T) to (x @ (w: T)). This is either `_` or `_*`. 
*/ - def cases(ifPat: => Tree, ifExpr: => Tree, wildName: TermName) = tree.expr match { + def cases(ifPat: => Tree, ifExpr: => Tree, wildName: TermName) = tree.expr match case id: untpd.Ident if (ctx.mode is Mode.Pattern) && untpd.isVarPattern(id) => if (id.name == nme.WILDCARD || id.name == nme.WILDCARD_STAR) ifPat - else { + else import untpd._ typed(Bind(id.name, Typed(Ident(wildName), tree.tpt)).withSpan(tree.span), pt) - } case _ => ifExpr - } - def ascription(tpt: Tree, isWildcard: Boolean) = { + def ascription(tpt: Tree, isWildcard: Boolean) = val underlyingTreeTpe = if (isRepeatedParamType(tpt)) TypeTree(defn.SeqType.appliedTo(pt :: Nil)) else tpt @@ -962,10 +921,9 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer else typed(tree.expr, underlyingTreeTpe.tpe.widenSkolem) assignType(cpy.Typed(tree)(expr1, tpt), underlyingTreeTpe) .withNotNullInfo(expr1.notNullInfo) - } - if (untpd.isWildcardStarArg(tree)) { - def typedWildcardStarArgExpr = { + if (untpd.isWildcardStarArg(tree)) + def typedWildcardStarArgExpr = // A sequence argument `xs: _*` can be either a `Seq[T]` or an `Array[_ <: T]`, // irrespective of whether the method we're calling is a Java or Scala method, // so the expected type is the union `Seq[T] | Array[_ <: T]`. 
@@ -984,27 +942,24 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer else // We need to make sure its type is no longer nullable expr0.castToNonNullable - else expr0 + else expr0 val fromCls = if expr1.tpe.derivesFrom(defn.ArrayClass) then defn.ArrayClass else defn.SeqClass val tpt1 = TypeTree(expr1.tpe.widen.translateToRepeated(fromCls)).withSpan(tree.tpt.span) assignType(cpy.Typed(tree)(expr1, tpt1), tpt1) - } cases( ifPat = ascription(TypeTree(defn.RepeatedParamType.appliedTo(pt)), isWildcard = true), ifExpr = typedWildcardStarArgExpr, wildName = nme.WILDCARD_STAR) - } - else { + else def typedTpt = checkSimpleKinded(typedType(tree.tpt, mapPatternBounds = true)) - def handlePattern: Tree = { + def handlePattern: Tree = val tpt1 = typedTpt if !ctx.isAfterTyper && pt != defn.ImplicitScrutineeTypeRef then - withMode(Mode.GadtConstraintInference) { + withMode(Mode.GadtConstraintInference): TypeComparer.constrainPatternType(tpt1.tpe, pt) - } val matched = ascription(tpt1, isWildcard = true) // special case for an abstract type that comes with a class tag val result = tryWithTypeTest(matched, pt) @@ -1015,47 +970,40 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer // no check for matchability if TestTest was applied checkMatchable(pt, tree.srcPos, pattern = true) result - } cases( ifPat = handlePattern, ifExpr = ascription(typedTpt, isWildcard = false), wildName = nme.WILDCARD) - } - } /** For a typed tree `e: T`, if `T` is an abstract type for which an implicit type test or class tag `tt` * exists, rewrite to `tt(e)`. 
* @pre We are in pattern-matching mode (Mode.Pattern) */ def tryWithTypeTest(tree: Typed, pt: Type)(using Context): Tree = - def withTag(tpe: Type): Option[Tree] = { + def withTag(tpe: Type): Option[Tree] = require(ctx.mode.is(Mode.Pattern)) withoutMode(Mode.Pattern)( inferImplicit(tpe, EmptyTree, tree.tpt.span) ) match case SearchSuccess(clsTag, _, _, _) => - withMode(Mode.InTypeTest) { + withMode(Mode.InTypeTest): Some(typed(untpd.Apply(untpd.TypedSplice(clsTag), untpd.TypedSplice(tree.expr)), pt)) - } case _ => None - } - def tagged(tpe: Type) = { + def tagged(tpe: Type) = val tag = withTag(defn.TypeTestClass.typeRef.appliedTo(pt, tpe)) .orElse(withTag(defn.ClassTagClass.typeRef.appliedTo(tpe))) .getOrElse(tree) if tag.symbol.maybeOwner == defn.ClassTagClass && config.Feature.sourceVersion.isAtLeast(config.SourceVersion.future) then report.warning("Use of `scala.reflect.ClassTag` for type testing may be unsound. Consider using `scala.reflect.TypeTest` instead.", tree.srcPos) tag - } - tree.tpt.tpe.dealias match { + tree.tpt.tpe.dealias match case tpe @ AppliedType(tref: TypeRef, _) if !tref.symbol.isClass && !ctx.isAfterTyper && !(tpe =:= pt) => tagged(tpe) case tref: TypeRef if !tref.symbol.isClass && !ctx.isAfterTyper && !(tref =:= pt) => tagged(tref) case _ => tree - } - def typedNamedArg(tree: untpd.NamedArg, pt: Type)(using Context): NamedArg = { + def typedNamedArg(tree: untpd.NamedArg, pt: Type)(using Context): NamedArg = /* Special case for resolving types for arguments of an annotation defined in Java. * It allows that value of any type T can appear in positions where Array[T] is expected. * For example, both `@Annot(5)` and `@Annot({5, 6}) are viable calls of the constructor @@ -1064,20 +1012,18 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer * with annotation contructor, as named arguments are not allowed anywhere else in Java. * Under explicit nulls, the pt could be nullable. We need to strip `Null` type first. 
*/ - val arg1 = pt.stripNull match { + val arg1 = pt.stripNull match case AppliedType(a, typ :: Nil) if ctx.isJava && a.isRef(defn.ArrayClass) => tryAlternatively { typed(tree.arg, pt) } { val elemTp = untpd.TypedSplice(TypeTree(typ)) typed(untpd.JavaSeqLiteral(tree.arg :: Nil, elemTp), pt) } case _ => typed(tree.arg, pt) - } assignType(cpy.NamedArg(tree)(tree.name, arg1), arg1) - } def typedAssign(tree: untpd.Assign, pt: Type)(using Context): Tree = - tree.lhs match { + tree.lhs match case lhs @ Apply(fn, args) => typed(untpd.Apply(untpd.Select(fn, nme.update), args :+ tree.rhs), pt) case untpd.TypedSplice(Apply(MaybePoly(Select(fn, app), targs), args)) if app == nme.apply => @@ -1133,46 +1079,40 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer } { (_, _) => reassignmentToVal } - case _ => lhsCore.tpe match { - case ref: TermRef => - val lhsVal = lhsCore.denot.suchThat(!_.is(Method)) - if (canAssign(lhsVal.symbol)) { + case _ => lhsCore.tpe match + case ref: TermRef => + val lhsVal = lhsCore.denot.suchThat(!_.is(Method)) + if (canAssign(lhsVal.symbol)) // lhsBounds: (T .. Any) as seen from lhs prefix, where T is the type of lhsVal.symbol // This ensures we do the as-seen-from on T with variance -1. 
Test case neg/i2928.scala - val lhsBounds = - TypeBounds.lower(lhsVal.symbol.info).asSeenFrom(ref.prefix, lhsVal.symbol.owner) - assignType(cpy.Assign(tree)(lhs1, typed(tree.rhs, lhsBounds.loBound))) + val lhsBounds = + TypeBounds.lower(lhsVal.symbol.info).asSeenFrom(ref.prefix, lhsVal.symbol.owner) + assignType(cpy.Assign(tree)(lhs1, typed(tree.rhs, lhsBounds.loBound))) .computeAssignNullable() - } - else { - val pre = ref.prefix - val setterName = ref.name.setterName - val setter = pre.member(setterName) - lhsCore match { - case lhsCore: RefTree if setter.exists => - val setterTypeRaw = pre.select(setterName, setter) - val setterType = ensureAccessible(setterTypeRaw, isSuperSelection(lhsCore), tree.srcPos) - val lhs2 = untpd.rename(lhsCore, setterName).withType(setterType) - typedUnadapted(untpd.Apply(untpd.TypedSplice(lhs2), tree.rhs :: Nil), WildcardType, locked) - case _ => - reassignmentToVal - } - } - case TryDynamicCallType => - typedDynamicAssign(tree, pt) - case tpe => - reassignmentToVal - } - } + else + val pre = ref.prefix + val setterName = ref.name.setterName + val setter = pre.member(setterName) + lhsCore match + case lhsCore: RefTree if setter.exists => + val setterTypeRaw = pre.select(setterName, setter) + val setterType = ensureAccessible(setterTypeRaw, isSuperSelection(lhsCore), tree.srcPos) + val lhs2 = untpd.rename(lhsCore, setterName).withType(setterType) + typedUnadapted(untpd.Apply(untpd.TypedSplice(lhs2), tree.rhs :: Nil), WildcardType, locked) + case _ => + reassignmentToVal + case TryDynamicCallType => + typedDynamicAssign(tree, pt) + case tpe => + reassignmentToVal def typedBlockStats(stats: List[untpd.Tree])(using Context): (List[tpd.Tree], Context) = index(stats) typedStats(stats, ctx.owner) - def typedBlock(tree: untpd.Block, pt: Type)(using Context): Tree = { - val (stats1, exprCtx) = withoutMode(Mode.Pattern) { + def typedBlock(tree: untpd.Block, pt: Type)(using Context): Tree = + val (stats1, exprCtx) = withoutMode(Mode.Pattern): 
typedBlockStats(tree.stats) - } var expr1 = typedExpr(tree.expr, pt.dropIfProto)(using exprCtx) @@ -1191,12 +1131,10 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer .withType(expr1.tpe) .withNotNullInfo(stats1.foldRight(expr1.notNullInfo)(_.notNullInfo.seq(_))), pt, localSyms(stats1)) - } - def escapingRefs(block: Tree, localSyms: => List[Symbol])(using Context): List[NamedType] = { + def escapingRefs(block: Tree, localSyms: => List[Symbol])(using Context): List[NamedType] = lazy val locals = localSyms.toSet block.tpe.namedPartsWith(tp => locals.contains(tp.symbol) && !tp.isErroneous) - } /** Ensure that an expression's type can be expressed without references to locally defined * symbols. This is done by adding a type ascription of a widened type that does @@ -1208,8 +1146,8 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer * expected type of a block is the anonymous class defined inside it. In that * case there's technically a leak which is not removed by the ascription. */ - protected def ensureNoLocalRefs(tree: Tree, pt: Type, localSyms: => List[Symbol])(using Context): Tree = { - def ascribeType(tree: Tree, pt: Type): Tree = tree match { + protected def ensureNoLocalRefs(tree: Tree, pt: Type, localSyms: => List[Symbol])(using Context): Tree = + def ascribeType(tree: Tree, pt: Type): Tree = tree match case block @ Block(stats, expr) if !expr.isInstanceOf[Closure] => val expr1 = ascribeType(expr, pt) cpy.Block(block)(stats, expr1) withType expr1.tpe // no assignType here because avoid is redundant @@ -1223,10 +1161,9 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer // and would not pass Ycheck. We have to use a cast instead. TODO: follow-up why // the cases arise and eliminate them, if possible. 
tree.cast(targetTpt) - } def noLeaks(t: Tree): Boolean = escapingRefs(t, localSyms).isEmpty if (noLeaks(tree)) tree - else { + else fullyDefinedType(tree.tpe, "block", tree.srcPos) var avoidingType = TypeOps.avoid(tree.tpe, localSyms) val ptDefined = isFullyDefined(pt, ForceDegree.none) @@ -1236,8 +1173,6 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer // `ptDefined` needed because of special case of anonymous classes i"leak: ${escapingRefs(tree1, localSyms).toList}%, % in $tree1") tree1 - } - } def typedIf(tree: untpd.If, pt: Type)(using Context): Tree = if tree.isInline then checkInInlineContext("inline if", tree.srcPos) @@ -1253,11 +1188,10 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer // This happens when the computation of the type of the if tree // uses GADT constraints. See #15646. def gadtAdaptBranch(tree: Tree, branchPt: Type): Tree = - TypeComparer.testSubType(tree.tpe.widenExpr, branchPt) match { + TypeComparer.testSubType(tree.tpe.widenExpr, branchPt) match case CompareResult.OKwithGADTUsed => insertGadtCast(tree, tree.tpe.widen, branchPt) case _ => tree - } val branchPt = if isIncomplete(tree) then defn.UnitType else pt.dropIfProto @@ -1302,17 +1236,15 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer * def double(x: Char): String = s"$x$x" * "abc" flatMap double */ - private def decomposeProtoFunction(pt: Type, defaultArity: Int, pos: SrcPos)(using Context): (List[Type], untpd.Tree) = { - def typeTree(tp: Type) = tp match { + private def decomposeProtoFunction(pt: Type, defaultArity: Int, pos: SrcPos)(using Context): (List[Type], untpd.Tree) = + def typeTree(tp: Type) = tp match case _: WildcardType => new untpd.InferredTypeTree() case _ => untpd.InferredTypeTree(tp) - } - def interpolateWildcards = new TypeMap { + def interpolateWildcards = new TypeMap: def apply(t: Type): Type = t match case WildcardType(bounds: TypeBounds) => newTypeVar(apply(bounds.orElse(TypeBounds.empty)).bounds) 
case _ => mapOver(t) - } val pt1 = pt.strippedDealias.normalized if (pt1 ne pt1.dropDependentRefinement) @@ -1322,30 +1254,27 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer em"""Implementation restriction: Expected result type $pt1 |is a curried dependent context function type. Such types are not yet supported.""", pos) - pt1 match { + pt1 match case tp: TypeParamRef => decomposeProtoFunction(ctx.typerState.constraint.entry(tp).bounds.hi, defaultArity, pos) - case _ => pt1.findFunctionType match { - case pt1 if defn.isNonRefinedFunction(pt1) => + case _ => pt1.findFunctionType match + case pt1 if defn.isNonRefinedFunction(pt1) => // if expected parameter type(s) are wildcards, approximate from below. // if expected result type is a wildcard, approximate from above. // this can type the greatest set of admissible closures. - (pt1.argInfos.init, typeTree(interpolateWildcards(pt1.argInfos.last.hiBound))) - case RefinedType(parent, nme.apply, mt @ MethodTpe(_, formals, restpe)) - if (defn.isNonRefinedFunction(parent) || defn.isErasedFunctionType(parent)) && formals.length == defaultArity => - (formals, untpd.DependentTypeTree(syms => restpe.substParams(mt, syms.map(_.termRef)))) - case SAMType(mt @ MethodTpe(_, formals, restpe)) => - (formals, - if (mt.isResultDependent) - untpd.DependentTypeTree(syms => restpe.substParams(mt, syms.map(_.termRef))) - else + (pt1.argInfos.init, typeTree(interpolateWildcards(pt1.argInfos.last.hiBound))) + case RefinedType(parent, nme.apply, mt @ MethodTpe(_, formals, restpe)) + if (defn.isNonRefinedFunction(parent) || defn.isErasedFunctionType(parent)) && formals.length == defaultArity => + (formals, untpd.DependentTypeTree(syms => restpe.substParams(mt, syms.map(_.termRef)))) + case SAMType(mt @ MethodTpe(_, formals, restpe)) => + (formals, + if (mt.isResultDependent) + untpd.DependentTypeTree(syms => restpe.substParams(mt, syms.map(_.termRef))) + else typeTree(restpe)) - case _ => - 
(List.tabulate(defaultArity)(alwaysWildcardType), untpd.TypeTree()) - } - } - } + case _ => + (List.tabulate(defaultArity)(alwaysWildcardType), untpd.TypeTree()) /** The parameter type for a parameter in a lambda that does * not have an explicit type given, and where the type is not known from the context. @@ -1387,17 +1316,16 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer if (ctx.mode is Mode.Type) typedFunctionType(tree, pt) else typedFunctionValue(tree, pt) - def typedFunctionType(tree: untpd.Function, pt: Type)(using Context): Tree = { + def typedFunctionType(tree: untpd.Function, pt: Type)(using Context): Tree = val untpd.Function(args, body) = tree body match case untpd.CapturesAndResult(refs, result) => return typedUnadapted(untpd.makeRetaining( cpy.Function(tree)(args, result), refs, tpnme.retains), pt) case _ => - var (funFlags, erasedParams) = tree match { + var (funFlags, erasedParams) = tree match case tree: untpd.FunctionWithMods => (tree.mods.flags, tree.erasedParams) case _ => (EmptyFlags, args.map(_ => false)) - } val numArgs = args.length val isContextual = funFlags.is(Given) @@ -1442,7 +1370,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer RefinedTypeTree(core, List(appDef), ctx.owner.asClass) end typedDependent - args match { + args match case ValDef(_, _, _) :: _ => typedDependent(args.asInstanceOf[List[untpd.ValDef]])( using ctx.fresh.setOwner(newRefinedClassSymbol(tree.span)).setNewScope) @@ -1457,16 +1385,13 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer case r: AppliedTypeTree if r.args.exists(_.tpe.isErasedClass) => typedFunctionType(desugar.makeFunctionWithValDefs(tree, pt), pt) case _ => result - } - } - def typedFunctionValue(tree: untpd.Function, pt: Type)(using Context): Tree = { + def typedFunctionValue(tree: untpd.Function, pt: Type)(using Context): Tree = val untpd.Function(params: List[untpd.ValDef] @unchecked, _) = tree: @unchecked - val (isContextual, 
isDefinedErased) = tree match { + val (isContextual, isDefinedErased) = tree match case tree: untpd.FunctionWithMods => (tree.mods.is(Given), tree.erasedParams) case _ => (false, tree.args.map(_ => false)) - } /** The function body to be returned in the closure. Can become a TypedSplice * of a typed expression if this is necessary to infer a parameter type. @@ -1481,16 +1406,14 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer /** If parameter `param` appears exactly once as an argument in `args`, * the singleton list consisting of its position in `args`, otherwise `Nil`. */ - def paramIndices(param: untpd.ValDef, args: List[untpd.Tree]): List[Int] = { - def loop(args: List[untpd.Tree], start: Int): List[Int] = args match { + def paramIndices(param: untpd.ValDef, args: List[untpd.Tree]): List[Int] = + def loop(args: List[untpd.Tree], start: Int): List[Int] = args match case arg :: args1 => val others = loop(args1, start + 1) if (refersTo(arg, param)) start :: others else others case _ => Nil - } val allIndices = loop(args, 0) if (allIndices.length == 1) allIndices else Nil - } /** A map from parameter names to unique positions where the parameter * appears in the argument list of an application. @@ -1517,7 +1440,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer * @post: If result exists, `paramIndex` is defined for the name of * every parameter in `params`. 
*/ - lazy val calleeType: Type = untpd.stripAnnotated(untpd.unsplice(fnBody)) match { + lazy val calleeType: Type = untpd.stripAnnotated(untpd.unsplice(fnBody)) match case ident: untpd.Ident if isContextual => val ident1 = typedIdent(ident, WildcardType) val tp = ident1.tpe.widen @@ -1538,23 +1461,20 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer case _ => val outerCtx = ctx val nestedCtx = outerCtx.fresh.setNewTyperState() - inContext(nestedCtx) { + inContext(nestedCtx): val protoArgs = args map (_ withType WildcardType) val callProto = FunProto(protoArgs, WildcardType)(this, app.applyKind) val expr1 = typedExpr(expr, callProto) if nestedCtx.reporter.hasErrors then NoType - else inContext(outerCtx) { + else inContext(outerCtx): nestedCtx.typerState.commit() fnBody = cpy.Apply(fnBody)(untpd.TypedSplice(expr1), args) expr1.tpe - } - } else NoType case _ => NoType - } - pt match { + pt match case pt: TypeVar if untpd.isFunctionWithUnknownParamType(tree) && !calleeType.exists => // try to instantiate `pt` if this is possible. If it does not @@ -1562,7 +1482,6 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer // when we try to infer the parameter type. 
isFullyDefined(pt, ForceDegree.flipBottom) case _ => - } val (protoFormals, resultTpt) = decomposeProtoFunction(pt, params.length, tree.srcPos) @@ -1575,10 +1494,9 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer def ptIsCorrectProduct(formal: Type) = isFullyDefined(formal, ForceDegree.flipBottom) && defn.isProductSubType(formal) && - tupleComponentTypes(formal).corresponds(params) { + tupleComponentTypes(formal).corresponds(params): (argType, param) => param.tpt.isEmpty || argType.widenExpr <:< typedAheadType(param.tpt).tpe - } var desugared: untpd.Tree = EmptyTree if protoFormals.length == 1 && params.length != 1 then @@ -1637,16 +1555,15 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer typed(desugared, pt) .showing(i"desugared fun $tree --> $desugared with pt = $pt", typr) - } - def typedClosure(tree: untpd.Closure, pt: Type)(using Context): Tree = { + def typedClosure(tree: untpd.Closure, pt: Type)(using Context): Tree = val env1 = tree.env mapconserve (typed(_)) val meth1 = typedUnadapted(tree.meth) val target = if (tree.tpt.isEmpty) - meth1.tpe.widen match { + meth1.tpe.widen match case mt: MethodType => - pt.findFunctionType match { + pt.findFunctionType match case pt @ SAMType(sam) if !defn.isFunctionType(pt) && mt <:< sam => // SAMs of the form C[?] where C is a class cannot be conversion targets. 
@@ -1678,35 +1595,29 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer |because it has capture conversion skolem types""") else EmptyTree - } case tp => if !tp.isErroneous then throw new java.lang.Error(i"internal error: closing over non-method $tp, pos = ${tree.span}") TypeTree(defn.AnyType) - } else typed(tree.tpt) //println(i"typing closure $tree : ${meth1.tpe.widen}") assignType(cpy.Closure(tree)(env1, meth1, target), meth1, target) - } def typedMatch(tree: untpd.Match, pt: Type)(using Context): Tree = - tree.selector match { + tree.selector match case EmptyTree => - if (tree.isInline) { + if (tree.isInline) checkInInlineContext("summonFrom", tree.srcPos) - val cases1 = tree.cases.mapconserve { + val cases1 = tree.cases.mapconserve: case cdef @ CaseDef(pat @ Typed(Ident(nme.WILDCARD), _), _, _) => // case _ : T --> case evidence$n : T cpy.CaseDef(cdef)(pat = untpd.Bind(EvidenceParamName.fresh(), pat)) case cdef => cdef - } typedMatchFinish(tree, tpd.EmptyTree, defn.ImplicitScrutineeTypeRef, cases1, pt) - } - else { + else val (protoFormals, _) = decomposeProtoFunction(pt, 1, tree.srcPos) val checkMode = desugar.MatchCheck.Exhaustive typed(desugar.makeCaseLambda(tree.cases, checkMode, protoFormals.length).withSpan(tree.span), pt) - } case _ => if tree.isInline then checkInInlineContext("inline match", tree.srcPos) val sel1 = typedExpr(tree.selector) @@ -1726,33 +1637,29 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer && tree.cases .map(cas => untpd.unbind(untpd.unsplice(cas.pat))) .zip(mt.cases) - .forall { + .forall: case (pat: untpd.Typed, pt) => // To check that pattern types correspond we need to type // check `pat` here and throw away the result. 
val gadtCtx: Context = ctx.fresh.setFreshGADTBounds val pat1 = typedPattern(pat, selType)(using gadtCtx) val Typed(_, tpt) = tpd.unbind(tpd.unsplice(pat1)): @unchecked - instantiateMatchTypeProto(pat1, pt) match { + instantiateMatchTypeProto(pat1, pt) match case defn.MatchCase(patternTp, _) => tpt.tpe frozen_=:= patternTp case _ => false - } case (id @ Ident(nme.WILDCARD), pt) => - pt match { + pt match case defn.MatchCase(patternTp, _) => defn.AnyType frozen_=:= patternTp case _ => false - } case _ => false - } - val result = pt match { + val result = pt match case mt: MatchType if isMatchTypeShaped(mt) => typedDependentMatchFinish(tree, sel1, selType, tree.cases, mt) case MatchType.InDisguise(mt) if isMatchTypeShaped(mt) => typedDependentMatchFinish(tree, sel1, selType, tree.cases, mt) case _ => typedMatchFinish(tree, sel1, selType, tree.cases, pt) - } /** Are some form of brackets necessary to annotate the tree `sel` as `@unchecked`? * If so, return a Some(opening bracket, closing bracket), otherwise None. @@ -1767,9 +1674,9 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer case _: untpd.Block => Some("{", "}") case _ => None - result match { + result match case result @ Match(sel, CaseDef(pat, _, _) :: _) => - tree.selector.removeAttachment(desugar.CheckIrrefutable) match { + tree.selector.removeAttachment(desugar.CheckIrrefutable) match case Some(checkMode) if !sel.tpe.hasAnnotation(defn.UncheckedAnnot) => val isPatDef = checkMode == desugar.MatchCheck.IrrefutablePatDef if !checkIrrefutable(sel, pat, isPatDef) @@ -1794,16 +1701,13 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer ) case _ => result - } case _ => result - } - } /** Special typing of Match tree when the expected type is a MatchType, * and the patterns of the Match tree and the MatchType correspond. 
*/ - def typedDependentMatchFinish(tree: untpd.Match, sel: Tree, wideSelType: Type, cases: List[untpd.CaseDef], pt: MatchType)(using Context): Tree = { + def typedDependentMatchFinish(tree: untpd.Match, sel: Tree, wideSelType: Type, cases: List[untpd.CaseDef], pt: MatchType)(using Context): Tree = var caseCtx = ctx val cases1 = tree.cases.zip(pt.cases) .map { case (cas, tpe) => @@ -1813,14 +1717,12 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer } .asInstanceOf[List[CaseDef]] assignType(cpy.Match(tree)(sel, cases1), sel, cases1).cast(pt) - } // Overridden in InlineTyper for inline matches - def typedMatchFinish(tree: untpd.Match, sel: Tree, wideSelType: Type, cases: List[untpd.CaseDef], pt: Type)(using Context): Tree = { + def typedMatchFinish(tree: untpd.Match, sel: Tree, wideSelType: Type, cases: List[untpd.CaseDef], pt: Type)(using Context): Tree = val cases1 = harmonic(harmonize, pt)(typedCases(cases, sel, wideSelType, pt.dropIfProto)) .asInstanceOf[List[CaseDef]] assignType(cpy.Match(tree)(sel, cases1), sel, cases1) - } def typedCases(cases: List[untpd.CaseDef], sel: Tree, wideSelType: Type, pt: Type)(using Context): List[CaseDef] = var caseCtx = ctx @@ -1834,49 +1736,43 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer * run/reducable.scala is a test case that shows stripping typevars is necessary. 
* - enter all symbols introduced by a Bind in current scope */ - private def indexPattern(cdef: untpd.CaseDef)(using Context) = new TreeMap { - val stripTypeVars = new TypeMap { + private def indexPattern(cdef: untpd.CaseDef)(using Context) = new TreeMap: + val stripTypeVars = new TypeMap: def apply(t: Type) = mapOver(t) - } override def transform(trt: Tree)(using Context) = - super.transform(trt.withType(stripTypeVars(trt.tpe))) match { + super.transform(trt.withType(stripTypeVars(trt.tpe))) match case b: Bind => val sym = b.symbol assert(sym.name != tpnme.WILDCARD) if ctx.scope.lookup(b.name) == NoSymbol then ctx.enter(sym) else report.error(new DuplicateBind(b, cdef), b.srcPos) - if (!ctx.isAfterTyper) { + if (!ctx.isAfterTyper) val bounds = ctx.gadt.fullBounds(sym) if (bounds != null) sym.info = bounds - } b case t: UnApply if t.symbol.is(Inline) => Inlines.inlinedUnapply(t) case t => t - } - } /** If the prototype `pt` is the type lambda (when doing a dependent * typing of a match), instantiate that type lambda with the pattern * variables found in the pattern `pat`. */ - def instantiateMatchTypeProto(pat: Tree, pt: Type)(using Context) = pt match { + def instantiateMatchTypeProto(pat: Tree, pt: Type)(using Context) = pt match case caseTp: HKTypeLambda => val bindingsSyms = tpd.patVars(pat).reverse val bindingsTps = bindingsSyms.collect { case sym if sym.isType => sym.typeRef } caseTp.appliedTo(bindingsTps) case pt => pt - } /** Type a case. 
*/ - def typedCase(tree: untpd.CaseDef, sel: Tree, wideSelType: Type, pt: Type)(using Context): CaseDef = { + def typedCase(tree: untpd.CaseDef, sel: Tree, wideSelType: Type, pt: Type)(using Context): CaseDef = val originalCtx = ctx val gadtCtx: Context = ctx.fresh.setFreshGADTBounds.setNewScope - def caseRest(pat: Tree)(using Context) = { - val pt1 = instantiateMatchTypeProto(pat, pt) match { + def caseRest(pat: Tree)(using Context) = + val pt1 = instantiateMatchTypeProto(pat, pt) match case defn.MatchCase(_, bodyPt) => bodyPt case pt => pt - } val pat1 = indexPattern(tree).transform(pat) val guard1 = typedExpr(tree.guard, defn.BooleanType) var body1 = ensureNoLocalRefs(typedExpr(tree.body, pt1), pt1, ctx.scope.toList) @@ -1890,39 +1786,32 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer if (pt1.isValueType) // insert a cast if body does not conform to expected type if we disregard gadt bounds body1 = body1.ensureConforms(pt1)(using originalCtx) assignType(cpy.CaseDef(tree)(pat1, guard1, body1), pat1, body1) - } val pat1 = typedPattern(tree.pat, wideSelType)(using gadtCtx) caseRest(pat1)( using Nullables.caseContext(sel, pat1)( using gadtCtx)) - } - def typedLabeled(tree: untpd.Labeled)(using Context): Labeled = { + def typedLabeled(tree: untpd.Labeled)(using Context): Labeled = val bind1 = typedBind(tree.bind, WildcardType).asInstanceOf[Bind] val expr1 = typed(tree.expr, bind1.symbol.info) assignType(cpy.Labeled(tree)(bind1, expr1)) - } /** Type a case of a type match */ - def typedTypeCase(cdef: untpd.CaseDef, selType: Type, pt: Type)(using Context): CaseDef = { - def caseRest(using Context) = { - val pat1 = withMode(Mode.Pattern) { + def typedTypeCase(cdef: untpd.CaseDef, selType: Type, pt: Type)(using Context): CaseDef = + def caseRest(using Context) = + val pat1 = withMode(Mode.Pattern): checkSimpleKinded(typedType(cdef.pat, mapPatternBounds = true)) - } if !ctx.isAfterTyper && pt != defn.ImplicitScrutineeTypeRef then - 
withMode(Mode.GadtConstraintInference) { + withMode(Mode.GadtConstraintInference): TypeComparer.constrainPatternType(pat1.tpe, selType) - } val pat2 = indexPattern(cdef).transform(pat1) var body1 = typedType(cdef.body, pt) if !body1.isType then assert(ctx.reporter.errorsReported) body1 = TypeTree(errorType(em"", cdef.srcPos)) assignType(cpy.CaseDef(cdef)(pat2, EmptyTree, body1), pat2, body1) - } caseRest(using ctx.fresh.setFreshGADTBounds.setNewScope) - } def typedReturn(tree: untpd.Return)(using Context): Return = @@ -1955,14 +1844,13 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer end typedReturn def typedWhileDo(tree: untpd.WhileDo)(using Context): Tree = - inContext(Nullables.whileContext(tree.span)) { + inContext(Nullables.whileContext(tree.span)): val cond1 = if (tree.cond eq EmptyTree) EmptyTree else typed(tree.cond, defn.BooleanType) val body1 = typed(tree.body, defn.UnitType)(using cond1.nullableContextIf(true)) assignType(cpy.WhileDo(tree)(cond1, body1)) .withNotNullInfo(body1.notNullInfo.retractedInfo.seq(cond1.notNullInfoIf(false))) - } /** Add givens reflecting `CanThrow` capabilities for all checked exceptions matched * by `cases`. 
The givens appear in nested blocks with earlier cases leading to @@ -1992,7 +1880,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer val capabilityProof = caughtExceptions.reduce(OrType(_, _, true)) untpd.Block(makeCanThrow(capabilityProof), expr) - def typedTry(tree: untpd.Try, pt: Type)(using Context): Try = { + def typedTry(tree: untpd.Try, pt: Type)(using Context): Try = val expr2 :: cases2x = harmonic(harmonize, pt) { val cases1 = typedCases(tree.cases, EmptyTree, defn.ThrowableType, pt.dropIfProto) val expr1 = typed(addCanThrowCapabilities(tree.expr, cases1), pt.dropIfProto) @@ -2001,7 +1889,6 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer val finalizer1 = typed(tree.finalizer, defn.UnitType) val cases2 = cases2x.asInstanceOf[List[CaseDef]] assignType(cpy.Try(tree)(expr2, cases2, finalizer1), expr2, cases2) - } def typedTry(tree: untpd.ParsedTry, pt: Type)(using Context): Try = val cases: List[untpd.CaseDef] = tree.handler match @@ -2025,22 +1912,20 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer Annotation(defn.RequiresCapabilityAnnot, cap, tree.span)))) else res - def typedSeqLiteral(tree: untpd.SeqLiteral, pt: Type)(using Context): SeqLiteral = { - val elemProto = pt.stripNull.elemType match { + def typedSeqLiteral(tree: untpd.SeqLiteral, pt: Type)(using Context): SeqLiteral = + val elemProto = pt.stripNull.elemType match case NoType => WildcardType case bounds: TypeBounds => WildcardType(bounds) case elemtp => elemtp - } def assign(elems1: List[Tree], elemtpt1: Tree) = assignType(cpy.SeqLiteral(tree)(elems1, elemtpt1), elems1, elemtpt1) - if (!tree.elemtpt.isEmpty) { + if (!tree.elemtpt.isEmpty) val elemtpt1 = typed(tree.elemtpt, elemProto) val elems1 = tree.elems.mapconserve(typed(_, elemtpt1.tpe)) assign(elems1, elemtpt1) - } - else { + else val elems1 = tree.elems.mapconserve(typed(_, elemProto)) val elemtptType = if (isFullyDefined(elemProto, ForceDegree.none)) @@ -2051,15 +1936,12 @@ 
class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer TypeComparer.lub(elems1.tpes) val elemtpt1 = typed(tree.elemtpt, elemtptType) assign(elems1, elemtpt1) - } - } - def typedInlined(tree: untpd.Inlined, pt: Type)(using Context): Tree = { + def typedInlined(tree: untpd.Inlined, pt: Type)(using Context): Tree = val (bindings1, exprCtx) = typedBlockStats(tree.bindings) val expansion1 = typed(tree.expansion, pt)(using inlineContext(tree.call)(using exprCtx)) assignType(cpy.Inlined(tree)(tree.call, bindings1.asInstanceOf[List[MemberDef]], expansion1), bindings1, expansion1) - } def completeTypeTree(tree: untpd.TypeTree, pt: Type, original: untpd.Tree)(using Context): TypeTree = tree.withSpan(original.span).withAttachmentsFrom(original) @@ -2072,7 +1954,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer tree match case tree: untpd.DerivedTypeTree => tree.ensureCompletions - tree.getAttachment(untpd.OriginalSymbol) match { + tree.getAttachment(untpd.OriginalSymbol) match case Some(origSym) => tree.derivedTree(origSym).withSpan(tree.span) // btw, no need to remove the attachment. The typed @@ -2081,17 +1963,15 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer // accesses with typedTypeTree are done. 
case None => errorTree(tree, em"Something's wrong: missing original symbol for type tree") - } case _ => completeTypeTree(InferredTypeTree(), pt, tree) - def typedSingletonTypeTree(tree: untpd.SingletonTypeTree)(using Context): SingletonTypeTree = { + def typedSingletonTypeTree(tree: untpd.SingletonTypeTree)(using Context): SingletonTypeTree = val ref1 = typedExpr(tree.ref, SingletonTypeProto) checkStable(ref1.tpe, tree.srcPos, "singleton type") assignType(cpy.SingletonTypeTree(tree)(ref1), ref1) - } - def typedRefinedTypeTree(tree: untpd.RefinedTypeTree)(using Context): TypTree = { + def typedRefinedTypeTree(tree: untpd.RefinedTypeTree)(using Context): TypTree = val tpt1 = if (tree.tpt.isEmpty) TypeTree(defn.ObjectType) else typedAheadType(tree.tpt) val refineClsDef = desugar.refinedTypeToClass(tpt1, tree.refinements).withSpan(tree.span) val refineCls = createSymbol(refineClsDef).asClass @@ -2099,7 +1979,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer val refinements1 = impl.body assert(tree.refinements.hasSameLengthAs(refinements1), i"${tree.refinements}%, % > $refinements1%, %") val seen = mutable.Set[Symbol]() - for (refinement <- refinements1) { // TODO: get clarity whether we want to enforce these conditions + for (refinement <- refinements1) // TODO: get clarity whether we want to enforce these conditions typr.println(s"adding refinement $refinement") checkRefinementNonCyclic(refinement, refineCls, seen) val rsym = refinement.symbol @@ -2113,11 +1993,9 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer val member = refineCls.info.member(rsym.name) if (member.isOverloaded) report.error(OverloadInRefinement(rsym), refinement.srcPos) - } assignType(cpy.RefinedTypeTree(tree)(tpt1, refinements1), tpt1, refinements1, refineCls) - } - def typedAppliedTypeTree(tree: untpd.AppliedTypeTree)(using Context): Tree = { + def typedAppliedTypeTree(tree: untpd.AppliedTypeTree)(using Context): Tree = tree.args match case arg :: _ if 
arg.isTerm => if Feature.dependentEnabled then @@ -2126,24 +2004,21 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer return errorTree(tree, dependentMsg) case _ => - val tpt1 = withoutMode(Mode.Pattern) { + val tpt1 = withoutMode(Mode.Pattern): typed(tree.tpt, AnyTypeConstructorProto) - } val tparams = tpt1.tpe.typeParams - if tpt1.tpe.isError then + if tpt1.tpe.isError then val args1 = tree.args.mapconserve(typedType(_)) assignType(cpy.AppliedTypeTree(tree)(tpt1, args1), tpt1, args1) - else if (tparams.isEmpty) { + else if (tparams.isEmpty) report.error(TypeDoesNotTakeParameters(tpt1.tpe, tree.args), tree.srcPos) tpt1 - } - else { + else var args = tree.args - val args1 = { - if (args.length != tparams.length) { + val args1 = + if (args.length != tparams.length) wrongNumberOfTypeArgs(tpt1.tpe, tparams, args, tree.srcPos) args = args.take(tparams.length) - } // If type constructor is not a class type, we need to eliminate // any references to other parameter types of the underlying hk lambda @@ -2166,7 +2041,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer case _ => super.apply(tp) widenMap(bounds) - def typedArg(arg: untpd.Tree, tparam: ParamInfo) = { + def typedArg(arg: untpd.Tree, tparam: ParamInfo) = def tparamBounds = val bounds = tparam.paramInfoAsSeenFrom(tpt1.tpe.appliedTo(tparams.map(_ => TypeBounds.empty))) @@ -2180,13 +2055,12 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer else (arg, WildcardType) if (tpt1.symbol.isClass) - tparam match { + tparam match case tparam: Symbol => tparam.ensureCompleted() // This is needed to get the test `compileParSetSubset` to work case _ => - } if (desugaredArg.isType) - arg match { + arg match case untpd.WildcardTypeBoundsTree() if tparam.paramInfo.isLambdaSub && tpt1.tpe.typeParamSymbols.nonEmpty => @@ -2199,19 +2073,15 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer TypeTree(tparamBounds).withSpan(arg.span) case _ => 
typedType(desugaredArg, argPt, mapPatternBounds = true) - } else desugaredArg.withType(UnspecifiedErrorType) - } args.zipWithConserve(tparams)(typedArg) - } - val paramBounds = tparams.lazyZip(args).map { + val paramBounds = tparams.lazyZip(args).map: case (tparam, untpd.WildcardTypeBoundsTree()) => // if type argument is a wildcard, suppress kind checking since // there is no real argument. NoType case (tparam, _) => tparam.paramInfo.bounds - } var checkedArgs = preCheckKinds(args1, paramBounds) // check that arguments conform to bounds is done in phase PostTyper val tycon = tpt1.symbol @@ -2238,16 +2108,13 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer report.error(em"not found: ", tpt1.srcPos) else if (ctx.isJava) if tycon eq defn.ArrayClass then - checkedArgs match { + checkedArgs match case List(arg) => val elemtp = arg.tpe.translateJavaArrayElementType if (elemtp ne arg.tpe) checkedArgs = List(TypeTree(elemtp).withSpan(arg.span)) case _ => - } assignType(cpy.AppliedTypeTree(tree)(tpt1, checkedArgs), tpt1, checkedArgs) - } - } private def typeIndexedLambdaTypeTree( tree: untpd.LambdaTypeTree, tparams: List[untpd.TypeDef], body: untpd.Tree)(using Context) = @@ -2266,7 +2133,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer else errorTree(tree, dependentMsg) - def typedMatchTypeTree(tree: untpd.MatchTypeTree, pt: Type)(using Context): Tree = { + def typedMatchTypeTree(tree: untpd.MatchTypeTree, pt: Type)(using Context): Tree = val bound1 = if (tree.bound.isEmpty && isFullyDefined(pt, ForceDegree.none)) TypeTree(pt) else typed(tree.bound) @@ -2277,7 +2144,6 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer val pt1 = if (bound1.isEmpty) pt else bound1.tpe val cases1 = tree.cases.mapconserve(typedTypeCase(_, sel1Tpe, pt1)) assignType(cpy.MatchTypeTree(tree)(bound1, sel1, cases1), bound1, sel1, cases1) - } def typedByNameTypeTree(tree: untpd.ByNameTypeTree)(using Context): ByNameTypeTree = tree.result 
match case untpd.CapturesAndResult(refs, tpe) => @@ -2296,11 +2162,11 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer val hi2 = if (hi1.isEmpty) typed(untpd.TypeTree(defn.AnyType)) else hi1 assignType(cpy.TypeBoundsTree(tree)(lo2, hi2, alias1), lo2, hi2, alias1) - def typedBind(tree: untpd.Bind, pt: Type)(using Context): Tree = { + def typedBind(tree: untpd.Bind, pt: Type)(using Context): Tree = if !isFullyDefined(pt, ForceDegree.all) then return errorTree(tree, em"expected type of $tree is not fully defined") val body1 = typed(tree.body, pt) - body1 match { + body1 match case UnApply(fn, Nil, arg :: Nil) if fn.symbol.exists && (fn.symbol.owner.derivesFrom(defn.TypeTestClass) || fn.symbol.owner == defn.ClassTagClass) && !body1.tpe.isError => // A typed pattern `x @ (e: T)` with an implicit `tt: TypeTest[T]` or `ctag: ClassTag[T]` @@ -2310,12 +2176,11 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer typed(untpd.Bind(tree.name, untpd.TypedSplice(arg)).withSpan(tree.span), arg.tpe) :: Nil) case _ => var name = tree.name - if (name == nme.WILDCARD && tree.mods.is(Given)) { + if (name == nme.WILDCARD && tree.mods.is(Given)) val Typed(_, tpt) = tree.body: @unchecked name = desugar.inventGivenOrExtensionName(tpt) - } if (name == nme.WILDCARD) body1 - else { + else // In `x @ Nil`, `Nil` is a _stable identifier pattern_ and will be compiled // to an `==` test, so the type of `x` is unrelated to the type of `Nil`. 
// Similarly, in `x @ 1`, `1` is a _literal pattern_ and will also be compiled @@ -2345,11 +2210,8 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer if (ctx.mode.is(Mode.InPatternAlternative)) report.error(IllegalVariableInPatternAlternative(sym.name), tree.srcPos) assignType(cpy.Bind(tree)(name, body1), sym) - } - } - } - def typedAlternative(tree: untpd.Alternative, pt: Type)(using Context): Alternative = { + def typedAlternative(tree: untpd.Alternative, pt: Type)(using Context): Alternative = val nestedCtx = ctx.addMode(Mode.InPatternAlternative) def ensureValueTypeOrWildcard(tree: Tree) = if tree.tpe.isValueTypeOrWildcard then tree @@ -2359,7 +2221,6 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer val trees1 = tree.trees.mapconserve(typed(_, pt)(using nestedCtx)) .mapconserve(ensureValueTypeOrWildcard) assignType(cpy.Alternative(tree)(trees1), trees1) - } /** The context to be used for an annotation of `mdef`. * This should be the context enclosing `mdef`, or if `mdef` defines a parameter @@ -2382,7 +2243,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer case _ => if outer.owner.isClass then local else outer - def completeAnnotations(mdef: untpd.MemberDef, sym: Symbol)(using Context): Unit = { + def completeAnnotations(mdef: untpd.MemberDef, sym: Symbol)(using Context): Unit = // necessary to force annotation trees to be computed. 
sym.annotations.foreach(_.ensureCompleted) lazy val annotCtx = annotContext(mdef, sym) @@ -2392,7 +2253,6 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer checkAnnotApplicable(annot1, sym) if Annotations.annotClass(annot1) == defn.NowarnAnnot then registerNowarn(annot1, mdef) - } def typedAnnotation(annot: untpd.Tree)(using Context): Tree = checkAnnotClass(checkAnnotArgs(typed(annot))) @@ -2403,12 +2263,12 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer var verbose = false val filters = annot.argumentConstantString(0) match case None => annot.argument(0) match - case Some(t: Select) if t.name.is(DefaultGetterName) => + case Some(t: Select) if t.name.is(DefaultGetterName) => // default argument used for `@nowarn` and `@nowarn()` - List(MessageFilter.Any) - case _ => - report.warning(s"filter needs to be a compile-time constant string", argPos) - List(MessageFilter.None) + List(MessageFilter.Any) + case _ => + report.warning(s"filter needs to be a compile-time constant string", argPos) + List(MessageFilter.None) case Some("") => List(MessageFilter.Any) case Some("verbose") | Some("v") => @@ -2425,22 +2285,20 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer if filters == List(MessageFilter.None) then sup.markUsed() ctx.run.nn.suppressions.addSuppression(sup) - def typedValDef(vdef: untpd.ValDef, sym: Symbol)(using Context): Tree = { + def typedValDef(vdef: untpd.ValDef, sym: Symbol)(using Context): Tree = val ValDef(name, tpt, _) = vdef completeAnnotations(vdef, sym) if (sym.isOneOf(GivenOrImplicit)) checkImplicitConversionDefOK(sym) if sym.is(Module) then checkNoModuleClash(sym) val tpt1 = checkSimpleKinded(typedType(tpt)) - val rhs1 = vdef.rhs match { + val rhs1 = vdef.rhs match case rhs @ Ident(nme.WILDCARD) => rhs withType tpt1.tpe case rhs => typedExpr(rhs, tpt1.tpe.widenExpr) - } val vdef1 = assignType(cpy.ValDef(vdef)(name, tpt1, rhs1), sym) postProcessInfo(sym) vdef1.setDefTree - } - def 
typedDefDef(ddef: untpd.DefDef, sym: Symbol)(using Context): Tree = { + def typedDefDef(ddef: untpd.DefDef, sym: Symbol)(using Context): Tree = def canBeInvalidated(sym: Symbol): Boolean = sym.is(Synthetic) && (desugar.isRetractableCaseClassMethodName(sym.name) || @@ -2468,9 +2326,8 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer val tpt1 = checkSimpleKinded(typedType(tpt)) val rhsCtx = ctx.fresh - val tparamss = paramss1.collect { + val tparamss = paramss1.collect: case untpd.TypeDefs(tparams) => tparams - } // Register GADT constraint for class type parameters from outer to inner class definition. (Useful when nested classes exist.) But do not cross a function definition. if sym.flags.is(Method) then @@ -2548,7 +2405,6 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer postProcessInfo(sym) ddef2.setDefTree //todo: make sure dependent method types do not depend on implicits or by-name params - } /** (1) Check that the signature of the class member does not return a repeated parameter type * (2) If info is an erased class, set erased flag of member @@ -2559,7 +2415,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer if !sym.is(Module) && !sym.isConstructor && sym.info.finalResultType.isErasedClass then sym.setFlag(Erased) - def typedTypeDef(tdef: untpd.TypeDef, sym: Symbol)(using Context): Tree = { + def typedTypeDef(tdef: untpd.TypeDef, sym: Symbol)(using Context): Tree = val TypeDef(name, rhs) = tdef completeAnnotations(tdef, sym) val rhs1 = tdef.rhs match @@ -2570,9 +2426,8 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer checkFullyAppliedType(rhs1) if sym.isOpaqueAlias then checkNoContextFunctionType(rhs1) assignType(cpy.TypeDef(tdef)(name, rhs1), sym) - } - def typedClassDef(cdef: untpd.TypeDef, cls: ClassSymbol)(using Context): Tree = { + def typedClassDef(cdef: untpd.TypeDef, cls: ClassSymbol)(using Context): Tree = if (!cls.info.isInstanceOf[ClassInfo]) return 
EmptyTree.assertingErrorsReported val TypeDef(name, impl @ Template(constr, _, self, _)) = cdef: @unchecked @@ -2644,22 +2499,19 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer ptrees /** Checks if one of the decls is a type with the same name as class type member in selfType */ - def classExistsOnSelf(decls: Scope, self: tpd.ValDef): Boolean = { + def classExistsOnSelf(decls: Scope, self: tpd.ValDef): Boolean = val selfType = self.tpt.tpe if (!selfType.exists || (selfType.classSymbol eq cls)) false - else { + else def memberInSelfButNotThis(decl: Symbol) = selfType.member(decl.name).symbol.filter(other => other.isClass && other.owner != cls) decls.iterator.filter(_.isType).foldLeft(false) { (foundRedef, decl) => val other = memberInSelfButNotThis(decl) - if (other.exists) { + if (other.exists) val msg = CannotHaveSameNameAs(decl, other, CannotHaveSameNameAs.DefinedInSelf(self)) report.error(msg, decl.srcPos) - } foundRedef || other.exists } - } - } ensureCorrectSuperClass() completeAnnotations(cdef, cls) @@ -2676,7 +2528,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer if (self1.tpt.tpe.isError || classExistsOnSelf(cls.unforcedDecls, self1)) // fail fast to avoid typing the body with an error type cdef.withType(UnspecifiedErrorType) - else { + else val dummy = localDummy(cls, impl) val body1 = addAccessorDefs(cls, typedStats(impl.body, dummy)(using ctx.inClassContext(self1.symbol))._1) @@ -2694,10 +2546,9 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer cdef1.symbol.ne(defn.DynamicClass) && cdef1.tpe.derivesFrom(defn.DynamicClass) && !Feature.dynamicsEnabled - if (reportDynamicInheritance) { + if (reportDynamicInheritance) val isRequired = parents1.exists(_.tpe.isRef(defn.DynamicClass)) report.featureWarning(nme.dynamics.toString, "extension of type scala.Dynamic", cls, isRequired, cdef.srcPos) - } checkNonCyclicInherited(cls.thisType, cls.info.parents, cls.info.decls, cdef.srcPos) @@ -2706,7 +2557,7 
@@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer // check PolyFunction constraints (no erased functions!) if parents1.exists(_.tpe.classSymbol eq defn.PolyFunctionClass) then - body1.foreach { + body1.foreach: case ddef: DefDef => ddef.paramss.foreach { params => val erasedParam = params.collectFirst { case vdef: ValDef if vdef.symbol.is(Erased) => vdef } @@ -2715,7 +2566,6 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer } } case _ => - } val effectiveOwner = cls.owner.skipWeakOwner if !cls.isRefinementClass @@ -2734,8 +2584,6 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer cdef1.putAttachment(AttachedDeriver, deriver) cdef1 - } - } // todo later: check that // 1. If class is non-abstract, it is instantiatable: @@ -2816,7 +2664,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer val pkg = pid1.symbol pid1 match case pid1: RefTree if pkg.is(Package) => - inContext(ctx.packageContext(tree, pkg)) { + inContext(ctx.packageContext(tree, pkg)): // If it exists, complete the class containing the top-level definitions // before typing any statement in the package to avoid cycles as in i13669.scala val topLevelClassName = desugar.packageObjectName(ctx.source).moduleClassName @@ -2825,7 +2673,6 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer if (!ctx.isAfterTyper) stats1 = stats1 ++ typedBlockStats(MainProxies.proxies(stats1))._1 cpy.PackageDef(tree)(pid1, stats1).withType(pkg.termRef) - } case _ => // Package will not exist if a duplicate type has already been entered, see `tests/neg/1708.scala` errorTree(tree, @@ -2833,13 +2680,13 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer else em"package ${tree.pid.name} does not exist") end typedPackageDef - def typedAnnotated(tree: untpd.Annotated, pt: Type)(using Context): Tree = { + def typedAnnotated(tree: untpd.Annotated, pt: Type)(using Context): Tree = val annot1 = checkAnnotClass(typedExpr(tree.annot)) 
val annotCls = Annotations.annotClass(annot1) if annotCls == defn.NowarnAnnot then registerNowarn(annot1, tree) val arg1 = typed(tree.arg, pt) - if (ctx.mode is Mode.Type) { + if (ctx.mode is Mode.Type) val cls = annot1.symbol.maybeOwner if Feature.ccEnabled && (cls == defn.RetainsAnnot || cls == defn.RetainsByNameAnnot) @@ -2850,62 +2697,53 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer else assert(ctx.reporter.errorsReported) TypeTree(UnspecifiedErrorType) - } - else { - val arg2 = arg1 match { + else + val arg2 = arg1 match case Typed(arg2, tpt: TypeTree) => - tpt.tpe match { + tpt.tpe match case _: AnnotatedType => // Avoid creating a Typed tree for each type annotation that is added. // Drop the outer Typed tree and use its type with the addition all annotation. arg2 case _ => arg1 - } case _ => arg1 - } val argType = if (arg1.isInstanceOf[Bind]) arg1.tpe.widen // bound symbol is not accessible outside of Bind node else arg1.tpe.widenIfUnstable val annotatedTpt = TypeTree(AnnotatedType(argType, Annotation(annot1))) assignType(cpy.Typed(tree)(arg2, annotatedTpt), annotatedTpt) - } - } def typedTypedSplice(tree: untpd.TypedSplice)(using Context): Tree = - tree.splice match { + tree.splice match case tree1: TypeTree => tree1 // no change owner necessary here ... case tree1: Ident => tree1 // ... 
or here, since these trees cannot contain bindings case tree1 => if (ctx.owner ne tree.owner) tree1.changeOwner(tree.owner, ctx.owner) else tree1 - } - def typedAsFunction(tree: untpd.PostfixOp, pt: Type)(using Context): Tree = { + def typedAsFunction(tree: untpd.PostfixOp, pt: Type)(using Context): Tree = val untpd.PostfixOp(qual, Ident(nme.WILDCARD)) = tree: @unchecked val pt1 = if (defn.isFunctionType(pt)) pt else AnyFunctionProto val nestedCtx = ctx.fresh.setNewTyperState() val res = typed(qual, pt1)(using nestedCtx) - res match { + res match case closure(_, _, _) => case _ => val recovered = typed(qual)(using ctx.fresh.setExploreTyperState()) report.errorOrMigrationWarning(OnlyFunctionsCanBeFollowedByUnderscore(recovered.tpe.widen), tree.srcPos, from = `3.0`) - if (migrateTo3) { + if (migrateTo3) // Under -rewrite, patch `x _` to `(() => x)` patch(Span(tree.span.start), "(() => ") patch(Span(qual.span.end, tree.span.end), ")") return typed(untpd.Function(Nil, qual), pt) - } - } nestedCtx.typerState.commit() if sourceVersion.isAtLeast(future) then - lazy val (prefix, suffix) = res match { + lazy val (prefix, suffix) = res match case Block(mdef @ DefDef(_, vparams :: Nil, _, _) :: Nil, _: Closure) => val arity = vparams.length if (arity > 0) ("", "") else ("(() => ", "())") case _ => ("(() => ", ")") - } def remedy = if ((prefix ++ suffix).isEmpty) "simply leave out the trailing ` _`" else s"use `$prefix$suffix` instead" @@ -2919,7 +2757,6 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer patch(Span(qual.span.end, tree.span.end), suffix) end if res - } /** Translate infix operation expression `l op r` to * @@ -2930,7 +2767,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer * Translate infix type `l op r` to `op[l, r]` * Translate infix pattern `l op r` to `op(l, r)` */ - def typedInfixOp(tree: untpd.InfixOp, pt: Type)(using Context): Tree = { + def typedInfixOp(tree: untpd.InfixOp, pt: Type)(using Context): Tree = val 
untpd.InfixOp(l, op, r) = tree val result = if (ctx.mode.is(Mode.Type)) @@ -2940,7 +2777,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer else cpy.AppliedTypeTree(tree)(op, l :: r :: Nil)) else if (ctx.mode.is(Mode.Pattern)) typedUnApply(cpy.Apply(tree)(op, l :: r :: Nil), pt) - else { + else val app = typedApply(desugar.binop(l, op, r), pt) if op.name.isRightAssocOperatorName then val defs = new mutable.ListBuffer[Tree] @@ -2954,20 +2791,18 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer tpd.cpy.Block(app)(stats, lift(expr)) wrapDefs(defs, lift(app)) else app - } // issue 10383: we stripBlock because e.g. default arguments desugar to blocks during typing, // and the block itself doesn't have a symbol (because a Block isn't a ProxyTree), // but the last expression in the block does have the right symbol checkValidInfix(tree, stripBlock(result).symbol) result - } /** Translate tuples of all arities */ - def typedTuple(tree: untpd.Tuple, pt: Type)(using Context): Tree = { + def typedTuple(tree: untpd.Tuple, pt: Type)(using Context): Tree = val arity = tree.trees.length if (arity <= Definitions.MaxTupleArity) typed(desugar.smallTuple(tree).withSpan(tree.span), pt) - else { + else val pts = pt.tupleElementTypes match case Some(types) if types.size == arity => types @@ -2978,31 +2813,26 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer if (ctx.mode.is(Mode.Type)) elems.foldRight(TypeTree(defn.EmptyTupleModule.termRef): Tree)((elemTpt, elemTpts) => AppliedTypeTree(TypeTree(defn.PairClass.typeRef), List(elemTpt, elemTpts))) - .withSpan(tree.span) - else { + .withSpan(tree.span) + else val tupleXXLobj = untpd.ref(defn.TupleXXLModule.termRef) val app = untpd.cpy.Apply(tree)(tupleXXLobj, elems.map(untpd.TypedSplice(_))) .withSpan(tree.span) val app1 = typed(app, if ctx.mode.is(Mode.Pattern) then pt else defn.TupleXXLClass.typeRef) if (ctx.mode.is(Mode.Pattern)) app1 - else { + else val elemTpes = 
elems.lazyZip(pts).map((elem, pt) => TypeComparer.widenInferred(elem.tpe, pt, widenUnions = true)) val resTpe = TypeOps.nestedPairs(elemTpes) app1.cast(resTpe) - } - } - } - } /** Retrieve symbol attached to given tree */ - protected def retrieveSym(tree: untpd.Tree)(using Context): Symbol = tree.removeAttachment(SymOfTree) match { + protected def retrieveSym(tree: untpd.Tree)(using Context): Symbol = tree.removeAttachment(SymOfTree) match case Some(sym) => sym.ensureCompleted() sym case none => NoSymbol - } protected def localTyper(sym: Symbol): Typer = nestedTyper.remove(sym).get @@ -3015,16 +2845,16 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer * @param locked the set of type variables of the current typer state that cannot be interpolated * at the present time */ - def typedUnadapted(initTree: untpd.Tree, pt: Type, locked: TypeVars)(using Context): Tree = { + def typedUnadapted(initTree: untpd.Tree, pt: Type, locked: TypeVars)(using Context): Tree = record("typedUnadapted") val xtree = expanded(initTree) - xtree.removeAttachment(TypedAhead) match { + xtree.removeAttachment(TypedAhead) match case Some(ttree) => ttree case none => - def typedNamed(tree: untpd.NameTree, pt: Type)(using Context): Tree = { + def typedNamed(tree: untpd.NameTree, pt: Type)(using Context): Tree = val sym = retrieveSym(xtree) - tree match { + tree match case tree: untpd.Ident => typedIdent(tree, pt) case tree: untpd.Select => typedSelect(tree, pt) case tree: untpd.Bind => typedBind(tree, pt) @@ -3052,10 +2882,8 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer typedTypeOrClassDef case tree: untpd.Labeled => typedLabeled(tree) case _ => typedUnadapted(desugar(tree, pt), pt, locked) - } - } - def typedUnnamed(tree: untpd.Tree): Tree = tree match { + def typedUnnamed(tree: untpd.Tree): Tree = tree match case tree: untpd.Apply => if (ctx.mode is Mode.Pattern) typedUnApply(tree, pt) else typedApply(tree, pt) case tree: untpd.This => typedThis(tree) 
@@ -3106,7 +2934,6 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer case tree: untpd.MacroTree => report.error("Unexpected macro", tree.srcPos); tpd.nullLiteral // ill-formed code may reach here case tree: untpd.Hole => typedHole(tree, pt) case _ => typedUnadapted(desugar(tree, pt), pt, locked) - } try val ifpt = defn.asContextFunctionType(pt) @@ -3128,8 +2955,6 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer // use focussed sourcePos since tree might be a large definition // and a large error span would hide all errors in interior. // TODO: Not clear that hiding is what we want, actually - } - } /** Interpolate and simplify the type of the given tree. */ protected def simplify(tree: Tree, pt: Type, locked: TypeVars)(using Context): tree.type = @@ -3143,7 +2968,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer tree.overwriteType(simplified) tree - protected def makeContextualFunction(tree: untpd.Tree, pt: Type)(using Context): Tree = { + protected def makeContextualFunction(tree: untpd.Tree, pt: Type)(using Context): Tree = val defn.FunctionOf(formals, _, true) = pt.dropDependentRefinement: @unchecked // The getter of default parameters may reach here. @@ -3166,25 +2991,22 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer // // see tests/pos/i7778b.scala - val paramTypes = { + val paramTypes = val hasWildcard = formals.exists(_.existsPart(_.isInstanceOf[WildcardType], StopAt.Static)) if hasWildcard then formals.map(_ => untpd.TypeTree()) else formals.map(untpd.TypeTree) - } - val erasedParams = pt.dealias match { + val erasedParams = pt.dealias match case RefinedType(parent, nme.apply, mt: MethodType) => mt.erasedParams case _ => paramTypes.map(_ => false) - } val ifun = desugar.makeContextualFunction(paramTypes, tree, erasedParams) typr.println(i"make contextual function $tree / $pt ---> $ifun") typedFunctionValue(ifun, pt) - } /** Typecheck and adapt tree, returning a typed tree. 
Parameters as for `typedUnadapted` */ def typed(tree: untpd.Tree, pt: Type, locked: TypeVars)(using Context): Tree = - trace(i"typing $tree, pt = $pt", typr, show = true) { + trace(i"typing $tree, pt = $pt", typr, show = true): record(s"typed $getClass") record("typed total") if ctx.phase.isTyper then @@ -3194,7 +3016,6 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer else if ctx.run.nn.isCancelled then tree.withType(WildcardType) else adapt(typedUnadapted(tree, pt, locked), pt, locked) - } def typed(tree: untpd.Tree, pt: Type = WildcardType)(using Context): Tree = typed(tree, pt, ctx.typerState.ownedVars) @@ -3202,24 +3023,24 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer def typedTrees(trees: List[untpd.Tree])(using Context): List[Tree] = trees mapconserve (typed(_)) - def typedStats(stats: List[untpd.Tree], exprOwner: Symbol)(using Context): (List[Tree], Context) = { + def typedStats(stats: List[untpd.Tree], exprOwner: Symbol)(using Context): (List[Tree], Context) = val buf = new mutable.ListBuffer[Tree] var enumContexts: SimpleIdentityMap[Symbol, Context] = SimpleIdentityMap.empty val initialNotNullInfos = ctx.notNullInfos // A map from `enum` symbols to the contexts enclosing their definitions - @tailrec def traverse(stats: List[untpd.Tree])(using Context): (List[Tree], Context) = stats match { + @tailrec def traverse(stats: List[untpd.Tree])(using Context): (List[Tree], Context) = stats match case (imp: untpd.Import) :: rest => val imp1 = typed(imp) buf += imp1 traverse(rest)(using ctx.importContext(imp, imp1.symbol)) case (mdef: untpd.DefTree) :: rest => - mdef.removeAttachment(ExpandedTree) match { + mdef.removeAttachment(ExpandedTree) match case Some(xtree) => traverse(xtree :: rest) case none => val newCtx = if (ctx.owner.isTerm && adaptCreationContext(mdef)) ctx else ctx.withNotNullInfos(initialNotNullInfos) - typed(mdef)(using newCtx) match { + typed(mdef)(using newCtx) match case mdef1: DefDef if 
mdef1.symbol.is(Inline, butNot = Deferred) && !Inlines.bodyToInline(mdef1.symbol).isEmpty => buf ++= inlineExpansion(mdef1) @@ -3232,9 +3053,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer // clashing synthetic case methods are converted to empty trees, drop them here case mdef1 => buf += mdef1 - } traverse(rest) - } case Thicket(stats) :: rest => traverse(stats ::: rest) case (stat: untpd.Export) :: rest => @@ -3252,24 +3071,20 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer traverse(rest)(using stat1.nullableContext) case nil => (buf.toList, ctx) - } - def finalize(stat: Tree)(using Context): Tree = stat match { + def finalize(stat: Tree)(using Context): Tree = stat match case stat: TypeDef if stat.symbol.is(Module) => val enumContext = enumContexts(stat.symbol.linkedClass) if enumContext != null then checkEnumCaseRefsLegal(stat, enumContext) - stat.removeAttachment(AttachedDeriver) match { + stat.removeAttachment(AttachedDeriver) match case Some(deriver) => deriver.finalize(stat) case None => stat - } case _ => stat - } val (stats0, finalCtx) = traverse(stats) val stats1 = stats0.mapConserve(finalize) if ctx.owner == exprOwner then checkNoTargetNameConflict(stats1) (stats1, finalCtx) - } /** Tries to adapt NotNullInfos from creation context to the DefTree, * returns whether the adaption took place. An adaption only takes place if the @@ -3280,24 +3095,22 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer // cannot be executed out-of-sequence. // We have to check the Completer of symbol befor typedValDef, // otherwise the symbol is already completed using creation context. 
- mdef.getAttachment(SymOfTree) match { - case Some(sym) => sym.infoOrCompleter match { - case completer: Namer#Completer => - if (completer.creationContext.notNullInfos ne ctx.notNullInfos) + mdef.getAttachment(SymOfTree) match + case Some(sym) => sym.infoOrCompleter match + case completer: Namer#Completer => + if (completer.creationContext.notNullInfos ne ctx.notNullInfos) // The RHS of a val def should know about not null facts established // in preceding statements (unless the DefTree is completed ahead of time, // then it is impossible). - sym.info = Completer(completer.original)( + sym.info = Completer(completer.original)( completer.creationContext.withNotNullInfos(ctx.notNullInfos)) - true - case _ => + true + case _ => // If it has been completed, then it must be because there is a forward reference // to the definition in the program. Hence, we don't Keep preceding not null facts // in the current context. - false - } + false case _ => false - } /** Given an inline method `mdef`, the method rewritten so that its body * uses accessors to access non-public members. 
Also, if the inline method @@ -3331,19 +3144,16 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer def typedPattern(tree: untpd.Tree, selType: Type = WildcardType)(using Context): Tree = withMode(Mode.Pattern)(typed(tree, selType)) - def tryEither[T](op: Context ?=> T)(fallBack: (T, TyperState) => T)(using Context): T = { + def tryEither[T](op: Context ?=> T)(fallBack: (T, TyperState) => T)(using Context): T = val nestedCtx = ctx.fresh.setNewTyperState() val result = op(using nestedCtx) - if (nestedCtx.reporter.hasErrors && !nestedCtx.reporter.hasStickyErrors) { + if (nestedCtx.reporter.hasErrors && !nestedCtx.reporter.hasStickyErrors) record("tryEither.fallBack") fallBack(result, nestedCtx.typerState) - } - else { + else record("tryEither.commit") nestedCtx.typerState.commit() result - } - } /** Try `op1`, if there are errors, try `op2`, if `op2` also causes errors, fall back * to errors and result of `op1`. @@ -3357,11 +3167,10 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer } /** Is `pt` a prototype of an `apply` selection, or a parameterless function yielding one? */ - def isApplyProto(pt: Type)(using Context): Boolean = pt.revealIgnored match { + def isApplyProto(pt: Type)(using Context): Boolean = pt.revealIgnored match case pt: SelectionProto => pt.name == nme.apply case pt: FunProto => pt.args.isEmpty && isApplyProto(pt.resultType) case _ => false - } /** Potentially add apply node or implicit conversions. Before trying either, * if the function is applied to an empty parameter list (), we try @@ -3384,17 +3193,16 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer * if an apply insertion was tried and `tree` has an `apply` method, or continues * with `fallBack` otherwise. `fallBack` is supposed to always give an error. 
*/ - def tryInsertApplyOrImplicit(tree: Tree, pt: ProtoType, locked: TypeVars)(fallBack: => Tree)(using Context): Tree = { - def isMethod(tree: Tree) = tree.tpe match { + def tryInsertApplyOrImplicit(tree: Tree, pt: ProtoType, locked: TypeVars)(fallBack: => Tree)(using Context): Tree = + def isMethod(tree: Tree) = tree.tpe match case ref: TermRef => ref.denot.alternatives.forall(_.info.widen.isInstanceOf[MethodicType]) case _ => false - } var assumeApplyExists = false // if true, issue any errors about the apply instead of `fallBack`, // since they are more likely to be informative. - def tryApply(using Context) = { + def tryApply(using Context) = val pt1 = pt.withContext(ctx) val sel = typedSelect(untpd.Select(untpd.TypedSplice(tree), nme.apply), pt1) .withAttachment(InsertedApply, ()) @@ -3410,7 +3218,6 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer else assumeApplyExists = true try adapt(simplify(sel, pt1, locked), pt1, locked) finally sel.removeAttachment(InsertedApply) - } def tryImplicit(fallBack: => Tree) = tryInsertImplicitOnQualifier(tree, pt.withContext(ctx), locked).getOrElse(fallBack) @@ -3418,7 +3225,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer if (ctx.mode.is(Mode.SynthesizeExtMethodReceiver)) // Suppress insertion of apply or implicit conversion on extension method receiver tree - else pt match { + else pt match case pt @ FunProto(Nil, _) if tree.symbol.allOverriddenSymbols.exists(_.info.isNullaryMethod) && !tree.hasAttachment(DroppedEmptyArgs) => @@ -3428,15 +3235,12 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer case _ => if (isApplyProto(pt) || isMethod(tree) || isSyntheticApply(tree)) tryImplicit(fallBack) else tryEither(tryApply) { (app, appState) => - tryImplicit { + tryImplicit: if assumeApplyExists then appState.commit() app else fallBack - } } - } - } /** If this tree is a select node `qual.name` (possibly applied to type variables) * that does not conform to `pt`, try two 
mitigations: @@ -3444,7 +3248,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer * 2. Try to insert an implicit conversion `c` around `qual` so that * `c(qual).name` conforms to `pt`. */ - def tryInsertImplicitOnQualifier(tree: Tree, pt: Type, locked: TypeVars)(using Context): Option[Tree] = trace(i"try insert impl on qualifier $tree $pt") { + def tryInsertImplicitOnQualifier(tree: Tree, pt: Type, locked: TypeVars)(using Context): Option[Tree] = trace(i"try insert impl on qualifier $tree $pt"): tree match case tree @ Select(qual, name) if name != nme.CONSTRUCTOR => if couldInstantiateTypeVar(qual.tpe.widen, applied = true) @@ -3466,7 +3270,6 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer case TypeApply(fn, args) if args.forall(_.isInstanceOf[untpd.InferredTypeTree]) => tryInsertImplicitOnQualifier(fn, pt, locked) case _ => None - } /** Given a selection `qual.name`, try to convert to an extension method * application `name(qual)` or insert an implicit conversion `c(qual).name`. 
@@ -3594,16 +3397,15 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer */ def adapt(tree: Tree, pt: Type, locked: TypeVars)(using Context): Tree = try - trace(i"adapting $tree to $pt", typr, show = true) { + trace(i"adapting $tree to $pt", typr, show = true): record("adapt") adapt1(tree, pt, locked) - } catch case ex: TypeError => errorTree(tree, ex, tree.srcPos.focus) final def adapt(tree: Tree, pt: Type)(using Context): Tree = adapt(tree, pt, ctx.typerState.ownedVars) - private def adapt1(tree: Tree, pt: Type, locked: TypeVars)(using Context): Tree = { + private def adapt1(tree: Tree, pt: Type, locked: TypeVars)(using Context): Tree = assert(pt.exists && !pt.isInstanceOf[ExprType] || ctx.reporter.errorsReported, i"tree: $tree, pt: $pt") def methodStr = err.refStr(methPart(tree).tpe) @@ -3689,7 +3491,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer end match end adaptOverloaded - def adaptToArgs(wtp: Type, pt: FunProto): Tree = wtp match { + def adaptToArgs(wtp: Type, pt: FunProto): Tree = wtp match case wtp: MethodOrPoly => def methodStr = methPart(tree).symbol.showLocated if (matchingApply(wtp, pt)) @@ -3714,24 +3516,21 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer readapt(tree.appliedToNone) // insert () to primary constructors else errorTree(tree, em"Missing arguments for $methodStr") - case _ => tryInsertApplyOrImplicit(tree, pt, locked) { - errorTree(tree, MethodDoesNotTakeParameters(tree)) - } - } + case _ => tryInsertApplyOrImplicit(tree, pt, locked): + errorTree(tree, MethodDoesNotTakeParameters(tree)) - def adaptNoArgsImplicitMethod(wtp: MethodType): Tree = { + def adaptNoArgsImplicitMethod(wtp: MethodType): Tree = assert(wtp.isImplicitMethod) val tvarsToInstantiate = tvarsInParams(tree, locked).distinct - def instantiate(tp: Type): Unit = { + def instantiate(tp: Type): Unit = instantiateSelected(tp, tvarsToInstantiate) replaceSingletons(tp) - } wtp.paramInfos.foreach(instantiate) val saved 
= ctx.typerState.snapshot() def dummyArg(tp: Type) = untpd.Ident(nme.???).withTypeUnchecked(tp) - def addImplicitArgs(using Context) = { + def addImplicitArgs(using Context) = def hasDefaultParams = methPart(tree).symbol.hasDefaultParams def implicitArgs(formals: List[Type], argIndex: Int, pt: Type): List[Tree] = formals match case Nil => Nil @@ -3784,23 +3583,20 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer val args = implicitArgs(wtp.paramInfos, 0, pt) - def propagatedFailure(args: List[Tree]): Type = args match { + def propagatedFailure(args: List[Tree]): Type = args match case arg :: args1 => - arg.tpe match { + arg.tpe match case ambi: AmbiguousImplicits => - propagatedFailure(args1) match { + propagatedFailure(args1) match case NoType | (_: AmbiguousImplicits) => ambi case failed => failed - } case failed: SearchFailureType => failed case _ => propagatedFailure(args1) - } case Nil => NoType - } val propFail = propagatedFailure(args) - def issueErrors(): Tree = { + def issueErrors(): Tree = def paramSymWithMethodTree(paramName: TermName) = if tree.symbol.exists then tree.symbol.paramSymss.flatten @@ -3812,19 +3608,17 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer None wtp.paramNames.lazyZip(wtp.paramInfos).lazyZip(args).foreach { (paramName, formal, arg) => - arg.tpe match { + arg.tpe match case failure: SearchFailureType => report.error( missingArgMsg(arg, formal, implicitParamString(paramName, methodStr, tree), paramSymWithMethodTree(paramName)), tree.srcPos.endPos ) case _ => - } } untpd.Apply(tree, args).withType(propFail) - } - if (propFail.exists) { + if (propFail.exists) // If there are several arguments, some arguments might already // have influenced the context, binding variables, but later ones // might fail. 
In that case the constraint and instantiated variables @@ -3842,25 +3636,20 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer typr.println(i"try with default implicit args $app") typed(app, pt, locked) else issueErrors() - } - else tree match { + else tree match case tree: Block => readaptSimplified(tpd.Block(tree.stats, tpd.Apply(tree.expr, args))) case tree: NamedArg => readaptSimplified(tpd.NamedArg(tree.name, tpd.Apply(tree.arg, args))) case _ => readaptSimplified(tpd.Apply(tree, args)) - } - } - pt.revealIgnored match { + pt.revealIgnored match case pt: FunProto if pt.applyKind == ApplyKind.Using => // We can end up here if extension methods are called with explicit given arguments. // See for instance #7119. tree case _ => addImplicitArgs(using argCtx(tree)) - } - } /** A synthetic apply should be eta-expanded if it is the apply of an implicit function * class, and the expected type is a function type. This rule is needed so we can pass @@ -3887,7 +3676,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer * * Examples for these cases are found in run/implicitFuns.scala and neg/i2006.scala. */ - def adaptNoArgsUnappliedMethod(wtp: MethodType, functionExpected: Boolean, arity: Int): Tree = { + def adaptNoArgsUnappliedMethod(wtp: MethodType, functionExpected: Boolean, arity: Int): Tree = /** Is reference to this symbol `f` automatically expanded to `f()`? 
*/ def isAutoApplied(sym: Symbol): Boolean = sym.isConstructor @@ -3922,11 +3711,10 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer adapt1(tree, pt1, locked) else if (!defn.isFunctionType(pt)) - pt match { + pt match case SAMType(_) if !pt.classSymbol.hasAnnotation(defn.FunctionalInterfaceAnnot) => report.warning(em"${tree.symbol} is eta-expanded even though $pt does not have the @FunctionalInterface annotation.", tree.srcPos) case _ => - } simplify(typed(etaExpand(tree, wtp, arity), pt), pt, locked) else if (wtp.paramInfos.isEmpty && isAutoApplied(tree.symbol)) readaptSimplified(tpd.Apply(tree, Nil)) @@ -3934,18 +3722,16 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer err.typeMismatch(tree, pt) else missingArgs(wtp) - } - def adaptNoArgsOther(wtp: Type, functionExpected: Boolean): Tree = { + def adaptNoArgsOther(wtp: Type, functionExpected: Boolean): Tree = val implicitFun = defn.isContextFunctionType(wtp) && !untpd.isContextualClosure(tree) def caseCompanion = functionExpected && tree.symbol.is(Module) && tree.symbol.companionClass.is(Case) && - !tree.tpe.baseClasses.exists(defn.isFunctionClass) && { + !tree.tpe.baseClasses.exists(defn.isFunctionClass) `&&`: report.warning("The method `apply` is inserted. The auto insertion will be deprecated, please write `" + tree.show + ".apply` explicitly.", tree.sourcePos) true - } if (implicitFun || caseCompanion) && !isApplyProto(pt) @@ -3976,19 +3762,17 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer tree.symbol != defn.StringContext_raw && tree.symbol != defn.StringContext_f && tree.symbol != defn.StringContext_s) - if (ctx.settings.XignoreScala2Macros.value) { + if (ctx.settings.XignoreScala2Macros.value) report.warning("Scala 2 macro cannot be used in Dotty, this call will crash at runtime. 
See https://docs.scala-lang.org/scala3/reference/dropped-features/macros.html", tree.srcPos.startPos) Throw(New(defn.MatchErrorClass.typeRef, Literal(Constant(s"Reached unexpanded Scala 2 macro call to ${tree.symbol.showFullName} compiled with -Xignore-scala2-macros.")) :: Nil)) .withType(tree.tpe) .withSpan(tree.span) - } - else { + else report.error( em"""Scala 2 macro cannot be used in Dotty. See https://docs.scala-lang.org/scala3/reference/dropped-features/macros.html |To turn this error into a warning, pass -Xignore-scala2-macros to the compiler""", tree.srcPos.startPos) tree - } else TypeComparer.testSubType(tree.tpe.widenExpr, pt) match case CompareResult.Fail => wtp match @@ -4010,18 +3794,16 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer case _ => //typr.println(i"OK ${tree.tpe}\n${TypeComparer.explained(_.isSubType(tree.tpe, pt))}") // uncomment for unexpected successes tree - } // Follow proxies and approximate type paramrefs by their upper bound // in the current constraint in order to figure out robustly // whether an expected type is some sort of function type. 
- def underlyingApplied(tp: Type): Type = tp.stripTypeVar match { + def underlyingApplied(tp: Type): Type = tp.stripTypeVar match case tp: RefinedType => tp case tp: AppliedType => tp case tp: TypeParamRef => underlyingApplied(TypeComparer.bounds(tp).hi) case tp: TypeProxy => underlyingApplied(tp.superType) case _ => tp - } // If the expected type is a selection of an extension method, deepen it // to also propagate the argument type (which is the receiver we have @@ -4034,14 +3816,14 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer if pt.memberProto.revealIgnored.isExtensionApplyProto => pt.deepenProto case _ => pt - def adaptNoArgs(wtp: Type): Tree = { + def adaptNoArgs(wtp: Type): Tree = val ptNorm = underlyingApplied(pt) def functionExpected = defn.isFunctionType(ptNorm) def needsEta = pt.revealIgnored match case _: SingletonType | _: FunOrPolyProto => false case _ => true var resMatch: Boolean = false - wtp match { + wtp match case wtp: ExprType => readaptSimplified(tree.withType(wtp.resultType)) case wtp: MethodType if wtp.isImplicitMethod && @@ -4072,25 +3854,20 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer adaptNoArgsUnappliedMethod(wtp, funExpected, arity) case _ => adaptNoArgsOther(wtp, functionExpected) - } - } /** Adapt an expression of constant type to a different constant type `tpe`. 
*/ - def adaptConstant(tree: Tree, tpe: ConstantType): Tree = { + def adaptConstant(tree: Tree, tpe: ConstantType): Tree = def lit = Literal(tpe.value).withSpan(tree.span) - tree match { + tree match case Literal(c) => lit case tree @ Block(stats, expr) => tpd.cpy.Block(tree)(stats, adaptConstant(expr, tpe)) case tree => if (isIdempotentExpr(tree)) lit // See discussion in phase Literalize why we demand isIdempotentExpr else Block(tree :: Nil, lit) - } - } - def toSAM(tree: Tree): Tree = tree match { + def toSAM(tree: Tree): Tree = tree match case tree: Block => tpd.cpy.Block(tree)(tree.stats, toSAM(tree.expr)) case tree: Closure => cpy.Closure(tree)(tpt = TypeTree(pt)).withType(pt) - } def adaptToSubType(wtp: Type): Tree = // try converting a constant to the target type @@ -4112,22 +3889,20 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer return readapt(tree.cast(captured)) // drop type if prototype is Unit - if (pt isRef defn.UnitClass) { + if (pt isRef defn.UnitClass) // local adaptation makes sure every adapted tree conforms to its pt // so will take the code path that decides on inlining val tree1 = adapt(tree, WildcardType, locked) checkStatementPurity(tree1)(tree, ctx.owner) - if (!ctx.isAfterTyper && !tree.isInstanceOf[Inlined] && ctx.settings.WvalueDiscard.value && !isThisTypeResult(tree)) { + if (!ctx.isAfterTyper && !tree.isInstanceOf[Inlined] && ctx.settings.WvalueDiscard.value && !isThisTypeResult(tree)) report.warning(ValueDiscarding(tree.tpe), tree.srcPos) - } return tpd.Block(tree1 :: Nil, Literal(Constant(()))) - } // convert function literal to SAM closure - tree match { + tree match case closure(Nil, id @ Ident(nme.ANON_FUN), _) if defn.isFunctionType(wtp) && !defn.isFunctionType(pt) => - pt match { + pt match case SAMType(sam) if wtp <:< sam.toFunctionType(isJava = pt.classSymbol.is(JavaDefined)) => // was ... 
&& isFullyDefined(pt, ForceDegree.flipBottom) @@ -4136,9 +3911,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer // body is typechecked. return toSAM(tree) case _ => - } case _ => - } // try an Any -> Matchable conversion if pt.isMatchableBound && !wtp.derivesFrom(defn.MatchableClass) then @@ -4187,10 +3960,10 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer recover(NoMatchingImplicits) end adaptToSubType - def adaptType(tp: Type): Tree = { + def adaptType(tp: Type): Tree = val tree1 = if ((pt eq AnyTypeConstructorProto) || tp.typeParamSymbols.isEmpty) tree - else { + else val tp1 = if (ctx.isJava) // Cook raw type @@ -4199,10 +3972,8 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer // Eta-expand higher-kinded type tree.tpe.EtaExpand(tp.typeParamSymbols) tree.withType(tp1) - } if (ctx.mode.is(Mode.Pattern) || ctx.mode.is(Mode.QuotedPattern) || tree1.tpe <:< pt) tree1 else err.typeMismatch(tree1, pt) - } /** If tree has an error type but no errors are reported yet, issue * the error message stored in the type. @@ -4213,10 +3984,9 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer * another alternative succeeds we can be left with an ErrorType in the * tree that went unreported. A scenario where this happens is i1802.scala. */ - def ensureReported(tp: Type) = tp match { + def ensureReported(tp: Type) = tp match case err: ErrorType if !ctx.reporter.errorsReported => report.error(err.msg, tree.srcPos) case _ => - } /** Convert constructor proxy reference to a new expression */ def newExpr = @@ -4242,46 +4012,41 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer case Select(_, nme.apply) => tree.symbol.isAllOf(ApplyProxyFlags) case _ => false - tree match { + tree match case _: MemberDef | _: PackageDef | _: Import | _: WithoutTypeOrPos[?] 
| _: Closure => tree - case _ => tree.tpe.widen match { - case tp: FlexType => - ensureReported(tp) - tree - case ref: TermRef => - pt match { - case pt: FunProto - if needsTupledDual(ref, pt) && Feature.autoTuplingEnabled => - adapt(tree, pt.tupledDual, locked) - case _ => - adaptOverloaded(ref) - } - case poly: PolyType if !(ctx.mode is Mode.Type) => - if isApplyProxy(tree) then newExpr - else if pt.isInstanceOf[PolyProto] then tree - else - var typeArgs = tree match - case Select(qual, nme.CONSTRUCTOR) => qual.tpe.widenDealias.argTypesLo.map(TypeTree(_)) - case _ => Nil - if typeArgs.isEmpty then typeArgs = constrained(poly, tree)._2 - convertNewGenericArray(readapt(tree.appliedToTypeTrees(typeArgs))) - case wtp => - val isStructuralCall = wtp.isValueType && isStructuralTermSelectOrApply(tree) - if (isStructuralCall) - readaptSimplified(handleStructural(tree)) - else pt match { - case pt: FunProto => - if isApplyProxy(tree) then newExpr - else adaptToArgs(wtp, pt) - case pt: PolyProto if !wtp.isImplicitMethod => - tryInsertApplyOrImplicit(tree, pt, locked)(tree) // error will be reported in typedTypeApply - case _ => - if (ctx.mode is Mode.Type) adaptType(tree.tpe) - else adaptNoArgs(wtp) - } - } - } - } + case _ => tree.tpe.widen match + case tp: FlexType => + ensureReported(tp) + tree + case ref: TermRef => + pt match + case pt: FunProto + if needsTupledDual(ref, pt) && Feature.autoTuplingEnabled => + adapt(tree, pt.tupledDual, locked) + case _ => + adaptOverloaded(ref) + case poly: PolyType if !(ctx.mode is Mode.Type) => + if isApplyProxy(tree) then newExpr + else if pt.isInstanceOf[PolyProto] then tree + else + var typeArgs = tree match + case Select(qual, nme.CONSTRUCTOR) => qual.tpe.widenDealias.argTypesLo.map(TypeTree(_)) + case _ => Nil + if typeArgs.isEmpty then typeArgs = constrained(poly, tree)._2 + convertNewGenericArray(readapt(tree.appliedToTypeTrees(typeArgs))) + case wtp => + val isStructuralCall = wtp.isValueType && 
isStructuralTermSelectOrApply(tree) + if (isStructuralCall) + readaptSimplified(handleStructural(tree)) + else pt match + case pt: FunProto => + if isApplyProxy(tree) then newExpr + else adaptToArgs(wtp, pt) + case pt: PolyProto if !wtp.isImplicitMethod => + tryInsertApplyOrImplicit(tree, pt, locked)(tree) // error will be reported in typedTypeApply + case _ => + if (ctx.mode is Mode.Type) adaptType(tree.tpe) + else adaptNoArgs(wtp) /** True if this inline typer has already issued errors */ def hasInliningErrors(using Context): Boolean = false @@ -4310,12 +4075,11 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer tree match case _: RefTree | _: Literal if !isVarPattern(tree) && !(pt <:< tree.tpe) => - withMode(Mode.GadtConstraintInference) { + withMode(Mode.GadtConstraintInference): TypeComparer.constrainPatternType(tree.tpe, pt) - } // approximate type params with bounds - def approx = new ApproximatingTypeMap { + def approx = new ApproximatingTypeMap: var alreadyExpanding: List[TypeRef] = Nil def apply(tp: Type) = tp.dealias match case tp: TypeRef if !tp.symbol.isClass => @@ -4327,13 +4091,12 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer res case _ => mapOver(tp) - } // Is it certain that a value of `tree.tpe` is never a subtype of `pt`? // It is true if either // - the class of `tree.tpe` and class of `pt` cannot have common subclass, or // - `tree` is an object or enum value, which cannot possibly be a subtype of `pt` - val isDefiniteNotSubtype = { + val isDefiniteNotSubtype = val clsA = tree.tpe.widenDealias.classSymbol val clsB = pt.dealias.classSymbol clsA.exists && clsB.exists @@ -4343,7 +4106,6 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer || tree.symbol.isOneOf(Module | Enum) && !(tree.tpe frozen_<:< pt) // fast track && !(tree.tpe frozen_<:< approx(pt)) - } if isDefiniteNotSubtype then // We could check whether `equals` is overridden. 
@@ -4360,7 +4122,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer typedExpr(cmp, defn.BooleanType) case _ => - private def checkInterestingResultInStatement(t: Tree)(using Context): Boolean = { + private def checkInterestingResultInStatement(t: Tree)(using Context): Boolean = def isUninterestingSymbol(sym: Symbol): Boolean = sym == NoSymbol || sym.isConstructor || @@ -4379,16 +4141,14 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer tpe =:= defn.AnyValType || tpe =:= defn.AnyType || tpe =:= defn.AnyRefType - def isJavaApplication(t: Tree): Boolean = t match { + def isJavaApplication(t: Tree): Boolean = t match case Apply(f, _) => f.symbol.is(JavaDefined) && !defn.ObjectClass.isSubClass(f.symbol.owner) case _ => false - } - def checkInterestingShapes(t: Tree): Boolean = t match { + def checkInterestingShapes(t: Tree): Boolean = t match case If(_, thenpart, elsepart) => checkInterestingShapes(thenpart) || checkInterestingShapes(elsepart) case Block(_, res) => checkInterestingShapes(res) case Match(_, cases) => cases.exists(k => checkInterestingShapes(k.body)) case _ => checksForInterestingResult(t) - } def checksForInterestingResult(t: Tree): Boolean = ( !t.isDef // ignore defs && !isUninterestingSymbol(t.symbol) // ctors, package, Unit, Any @@ -4399,19 +4159,16 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer // && !treeInfo.hasExplicitUnit(t) // suppressed by explicit expr: Unit // TODO Should explicit `: Unit` be added as warning suppression? 
) if ctx.settings.WNonUnitStatement.value && !ctx.isAfterTyper && checkInterestingShapes(t) then - val where = t match { + val where = t match case Block(_, res) => res case If(_, thenpart, Literal(Constant(()))) => - thenpart match { + thenpart match case Block(_, res) => res case _ => thenpart - } case _ => t - } report.warning(UnusedNonUnitValue(where.tpe), t.srcPos) true else false - } private def checkStatementPurity(tree: tpd.Tree)(original: untpd.Tree, exprOwner: Symbol)(using Context): Unit = if !tree.tpe.isErroneous @@ -4437,7 +4194,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer /** Types the body Scala 2 macro declaration `def f = macro ` */ protected def typedScala2MacroBody(call: untpd.Tree)(using Context): Tree = // TODO check that call is to a method with valid signature - def typedPrefix(tree: untpd.RefTree)(splice: Context ?=> Tree => Tree)(using Context): Tree = { + def typedPrefix(tree: untpd.RefTree)(splice: Context ?=> Tree => Tree)(using Context): Tree = tryAlternatively { splice(typedExpr(tree, defn.AnyType)) } { @@ -4450,7 +4207,6 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer val bundleVal = SyntheticValDef(NameKinds.UniqueName.fresh(nme.bundle), bundle1).withSpan(call.span) tpd.Block(List(bundleVal), splice(tpd.ref(bundleVal.symbol))).withSpan(call.span) } - } if ctx.phase.isTyper then call match case untpd.Ident(nme.???) => // Instinsic macros ignored @@ -4494,7 +4250,7 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer val gadtApprox = Inferencing.approximateGADT(wtp) gadts.println(i"gadt approx $wtp ~~~ $gadtApprox") val conj = - TypeComparer.testSubType(gadtApprox, pt) match { + TypeComparer.testSubType(gadtApprox, pt) match case CompareResult.OK => // GADT approximation of the tree type is a subtype of expected type under empty GADT // constraints, so it is enough to only have the GADT approximation. 
@@ -4502,7 +4258,6 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer case _ => // In other cases, we intersect both the approximated type and the expected type. AndType(AndType(tree.tpe, gadtApprox), pt) - } if tree.tpe.isStable && !conj.isStable then // this is needed for -Ycheck. Without the annotation Ycheck will // skolemize the result type which will lead to different types before @@ -4513,4 +4268,3 @@ class Typer(@constructorOnly nestingLevel: Int = 0) extends Namer gadts.println(i"insert GADT cast from $tree to $target") tree.cast(target) end insertGadtCast -} diff --git a/compiler/src/dotty/tools/dotc/typer/TyperPhase.scala b/compiler/src/dotty/tools/dotc/typer/TyperPhase.scala index 60f0c043b435..4a09a5567528 100644 --- a/compiler/src/dotty/tools/dotc/typer/TyperPhase.scala +++ b/compiler/src/dotty/tools/dotc/typer/TyperPhase.scala @@ -17,7 +17,7 @@ import util.Stats._ * @param addRootImports Set to false in the REPL. Calling [[ImportInfo.withRootImports]] on the [[Context]] * for each [[CompilationUnit]] causes dotty.tools.repl.ScriptedTests to fail. 
*/ -class TyperPhase(addRootImports: Boolean = true) extends Phase { +class TyperPhase(addRootImports: Boolean = true) extends Phase: override def phaseName: String = TyperPhase.name @@ -31,13 +31,12 @@ class TyperPhase(addRootImports: Boolean = true) extends Phase { // Run regardless of parsing errors override def isRunnable(implicit ctx: Context): Boolean = true - def enterSyms(using Context): Unit = monitor("indexing") { + def enterSyms(using Context): Unit = monitor("indexing"): val unit = ctx.compilationUnit ctx.typer.index(unit.untpdTree) typr.println("entered: " + unit.source) - } - def typeCheck(using Context): Unit = monitor("typechecking") { + def typeCheck(using Context): Unit = monitor("typechecking"): val unit = ctx.compilationUnit try if !unit.suspended then @@ -51,13 +50,11 @@ class TyperPhase(addRootImports: Boolean = true) extends Phase { case ex: Throwable => println(s"$ex while typechecking $unit") throw ex - } - def javaCheck(using Context): Unit = monitor("checking java") { + def javaCheck(using Context): Unit = monitor("checking java"): val unit = ctx.compilationUnit if unit.isJava then JavaChecks.check(unit.tpdTree) - } protected def discardAfterTyper(unit: CompilationUnit)(using Context): Boolean = unit.isJava || unit.suspended @@ -75,7 +72,7 @@ class TyperPhase(addRootImports: Boolean = true) extends Phase { unitContexts.foreach(enterSyms(using _)) - ctx.base.parserPhase match { + ctx.base.parserPhase match case p: ParserPhase => if p.firstXmlPos.exists && !defn.ScalaXmlPackageClass.exists then report.error( @@ -83,7 +80,6 @@ class TyperPhase(addRootImports: Boolean = true) extends Phase { |See https://github.com/scala/scala-xml for more information.""".stripMargin, p.firstXmlPos) case _ => - } unitContexts.foreach(typeCheck(using _)) record("total trees after typer", ast.Trees.ntrees) @@ -94,15 +90,12 @@ class TyperPhase(addRootImports: Boolean = true) extends Phase { newUnits def run(using Context): Unit = unsupported("run") -} 
-object TyperPhase { +object TyperPhase: val name: String = "typer" val description: String = "type the trees" -} @deprecated(message = "FrontEnd has been split into TyperPhase and Parser. Refer to one or the other.") -object FrontEnd { +object FrontEnd: // For backwards compatibility: some plugins refer to FrontEnd so that they can schedule themselves after it. val name: String = TyperPhase.name -} diff --git a/compiler/src/dotty/tools/dotc/typer/VarianceChecker.scala b/compiler/src/dotty/tools/dotc/typer/VarianceChecker.scala index bcfc9288d862..228876369e16 100644 --- a/compiler/src/dotty/tools/dotc/typer/VarianceChecker.scala +++ b/compiler/src/dotty/tools/dotc/typer/VarianceChecker.scala @@ -17,7 +17,7 @@ import printing.Formatting.hl * in tree are variance correct. Does not recurse inside methods. * The method should be invoked once for each Template. */ -object VarianceChecker { +object VarianceChecker: case class VarianceError(tvar: Symbol, required: Variance) def check(tree: tpd.Tree)(using Context): Unit = VarianceChecker().Traverser.traverse(tree) @@ -29,10 +29,10 @@ object VarianceChecker { def checkLambda(tree: tpd.LambdaTypeTree, bounds: TypeBounds)(using Context): Unit = def checkType(tpe: Type): Unit = tpe match case tl: HKTypeLambda if tl.isDeclaredVarianceLambda => - val checkOK = new TypeAccumulator[Boolean] { + val checkOK = new TypeAccumulator[Boolean]: def paramVarianceSign(tref: TypeParamRef) = tl.typeParams(tref.paramNum).paramVarianceSign - def error(tref: TypeParamRef) = { + def error(tref: TypeParamRef) = val paramName = tl.paramNames(tref.paramNum).toTermName val v = paramVarianceSign(tref) val pos = tree.tparams @@ -40,9 +40,8 @@ object VarianceChecker { .map(_.srcPos) .getOrElse(tree.srcPos) report.error(em"${varianceLabel(v)} type parameter $paramName occurs in ${varianceLabel(variance)} position in ${tl.resType}", pos) - } - def apply(x: Boolean, t: Type) = x && { - t match { + def apply(x: Boolean, t: Type) = x `&&`: + t match case 
tref: TypeParamRef if tref.binder `eq` tl => varianceConforms(variance, paramVarianceSign(tref)) || { error(tref); false } @@ -50,9 +49,6 @@ object VarianceChecker { x case _ => foldOver(x, t) - } - } - } checkOK(true, tl.resType) case _ => end checkType @@ -60,13 +56,12 @@ object VarianceChecker { checkType(bounds.lo) checkType(bounds.hi) end checkLambda -} -class VarianceChecker(using Context) { +class VarianceChecker(using Context): import VarianceChecker._ import tpd._ - private object Validator extends TypeAccumulator[Option[VarianceError]] { + private object Validator extends TypeAccumulator[Option[VarianceError]]: private var base: Symbol = _ /** The variance of a symbol occurrence of `tvar` seen at the level of the definition of `base`. @@ -98,10 +93,10 @@ class VarianceChecker(using Context) { if (meth.isConstructor) meth.owner.owner else meth.owner /** Check variance of abstract type `tvar` when referred from `base`. */ - private def checkVarianceOfSymbol(tvar: Symbol): Option[VarianceError] = { + private def checkVarianceOfSymbol(tvar: Symbol): Option[VarianceError] = val relative = relativeVariance(tvar, base) if (relative == Bivariant) None - else { + else val required = if variance == 1 then relative else if variance == -1 then flip(relative) else Invariant def tvar_s = s"$tvar (${varianceLabel(tvar.flags)} ${tvar.showLocated})" def base_s = s"$base in ${base.owner}" + (if (base.owner.isClass) "" else " in " + base.owner.enclosingClass) @@ -111,25 +106,22 @@ class VarianceChecker(using Context) { report.log(s"owner chain: ${base.ownersIterator.toList}") if (tvar.isOneOf(required)) None else Some(VarianceError(tvar, required)) - } - } /** For PolyTypes, type parameters are skipped because they are defined * explicitly (their TypeDefs will be passed here.) For MethodTypes, the * same is true of the parameters (ValDefs). 
*/ - def apply(status: Option[VarianceError], tp: Type): Option[VarianceError] = trace(s"variance checking $tp of $base at $variance", variances) { + def apply(status: Option[VarianceError], tp: Type): Option[VarianceError] = trace(s"variance checking $tp of $base at $variance", variances): try if (status.isDefined) status - else tp match { + else tp match case tp: TypeRef => val sym = tp.symbol if (sym.isOneOf(VarianceFlags) && base.isContainedIn(sym.owner)) checkVarianceOfSymbol(sym) - else sym.info match { + else sym.info match case MatchAlias(_) => foldOver(status, tp) case TypeAlias(alias) => this(status, alias) case _ => foldOver(status, tp) - } case tp: MethodOrPoly => this(status, tp.resultType) // params will be checked in their TypeDef or ValDef nodes. case AnnotatedType(_, annot) if annot.symbol == defn.UncheckedVarianceAnnot => @@ -138,22 +130,17 @@ class VarianceChecker(using Context) { foldOver(status, tp.parents) case _ => foldOver(status, tp) - } - catch { + catch case ex: Throwable => handleRecursive("variance check of", tp.show, ex) - } - } - def validateDefinition(base: Symbol): Option[VarianceError] = { + def validateDefinition(base: Symbol): Option[VarianceError] = val saved = this.base this.base = base try apply(None, base.info) finally this.base = saved - } - } - private object Traverser extends TreeTraverser { - def checkVariance(sym: Symbol, pos: SrcPos) = Validator.validateDefinition(sym) match { + private object Traverser extends TreeTraverser: + def checkVariance(sym: Symbol, pos: SrcPos) = Validator.validateDefinition(sym) match case Some(VarianceError(tvar, required)) => def msg = val enumAddendum = @@ -176,34 +163,27 @@ class VarianceChecker(using Context) { // TODO need to use a `:' if annotation is on term else report.error(msg, pos) case None => - } - override def traverse(tree: Tree)(using Context) = { + override def traverse(tree: Tree)(using Context) = def sym = tree.symbol // No variance check for private/protected[this] 
methods/values. def skip = !sym.exists || sym.name.is(InlineAccessorName) // TODO: should we exclude all synthetic members? || sym.isAllOf(LocalParamAccessor) // local class parameters are construction only || sym.is(TypeParam) && sym.owner.isClass // already taken care of in primary constructor of class - try tree match { + try tree match case defn: MemberDef if skip => report.debuglog(s"Skipping variance check of ${sym.showDcl}") case tree: TypeDef => checkVariance(sym, tree.srcPos) - tree.rhs match { + tree.rhs match case rhs: Template => traverseChildren(rhs) case _ => - } case tree: ValDef => checkVariance(sym, tree.srcPos) case DefDef(_, paramss, _, _) => checkVariance(sym, tree.srcPos) paramss.foreach(_.foreach(traverse)) case _ => - } - catch { + catch case ex: TypeError => report.error(ex, tree.srcPos.focus) - } - } - } -} diff --git a/compiler/src/dotty/tools/dotc/util/Attachment.scala b/compiler/src/dotty/tools/dotc/util/Attachment.scala index d545c3631f20..76fd57af7f18 100644 --- a/compiler/src/dotty/tools/dotc/util/Attachment.scala +++ b/compiler/src/dotty/tools/dotc/util/Attachment.scala @@ -9,89 +9,78 @@ import core.Contexts.Context * Attachments whose key is an instance of `StickyKey` will be kept when the attachments * are copied using `withAttachmentsFrom`. */ -object Attachment { +object Attachment: import Property.{Key, StickyKey} /** An implementation trait for attachments. * Clients should inherit from Container instead. */ - trait LinkSource { + trait LinkSource: private[Attachment] var next: Link[?] | Null /** Optionally get attachment corresponding to `key` */ - final def getAttachment[V](key: Key[V]): Option[V] = { + final def getAttachment[V](key: Key[V]): Option[V] = val nx = next if (nx == null) None else if (nx.key eq key) Some(nx.value.asInstanceOf[V]) else nx.getAttachment[V](key) - } /** Does an attachment corresponding to `key` exist? 
*/ - final def hasAttachment[V](key: Key[V]): Boolean = { + final def hasAttachment[V](key: Key[V]): Boolean = val nx = next if (nx == null) false else if (nx.key eq key) true else nx.hasAttachment[V](key) - } /** The attachment corresponding to `key`. * @throws NoSuchElementException if no attachment with key exists */ - final def attachment[V](key: Key[V]): V = { + final def attachment[V](key: Key[V]): V = val nx = next if (nx == null) throw new NoSuchElementException else if (nx.key eq key) nx.value.asInstanceOf[V] else nx.attachment(key) - } /** The attachment corresponding to `key`, or `default` * if no attachment with `key` exists. */ - final def attachmentOrElse[V](key: Key[V], default: V): V = { + final def attachmentOrElse[V](key: Key[V], default: V): V = val nx = next if (nx == null) default else if (nx.key eq key) nx.value.asInstanceOf[V] else nx.attachmentOrElse(key, default) - } /** Add attachment with given `key` and `value`. * @return Optionally, the old attachment with given `key` if one existed before. * The new attachment is added at the position of the old one, or at the end * if no attachment with same `key` existed. */ - final def putAttachment[V](key: Key[V], value: V): Option[V] = { + final def putAttachment[V](key: Key[V], value: V): Option[V] = val nx = next - if (nx == null) { + if (nx == null) next = new Link(key, value, null) None - } - else if (nx.key eq key) { + else if (nx.key eq key) next = new Link(key, value, nx.next) Some(nx.value.asInstanceOf[V]) - } else nx.putAttachment(key, value) - } /** Remove attachment with given `key`, if it exists. * @return Optionally, the removed attachment with given `key` if one existed before. 
*/ - final def removeAttachment[V](key: Key[V]): Option[V] = { + final def removeAttachment[V](key: Key[V]): Option[V] = val nx = next if (nx == null) None - else if (nx.key eq key) { + else if (nx.key eq key) next = nx.next Some(nx.value.asInstanceOf[V]) - } else nx.removeAttachment(key) - } /** The list of all keys and values attached to this container. */ - final def allAttachments: List[(Key[?], Any)] = { + final def allAttachments: List[(Key[?], Any)] = val nx = next if (nx == null) Nil else (nx.key, nx.value) :: nx.allAttachments - } - } /** A private, concrete implementation class linking attachments. */ @@ -99,30 +88,24 @@ object Attachment { extends LinkSource /** A trait for objects that can contain attachments */ - trait Container extends LinkSource { + trait Container extends LinkSource: private[Attachment] var next: Link[?] | Null = null /** Copy the sticky attachments from `container` to this container. */ - final def withAttachmentsFrom(container: Container): this.type = { + final def withAttachmentsFrom(container: Container): this.type = var current: Link[?] 
| Null = container.next - while (current != null) { + while (current != null) if (current.key.isInstanceOf[StickyKey[?]]) putAttachment(current.key, current.value) current = current.next - } this - } - def withAttachment[V](key: Key[V], value: V): this.type = { + def withAttachment[V](key: Key[V], value: V): this.type = putAttachment(key, value) this - } - final def pushAttachment[V](key: Key[V], value: V)(using ctx: Context): Unit = { + final def pushAttachment[V](key: Key[V], value: V)(using ctx: Context): Unit = assert(!hasAttachment(key) || ctx.base.errorsToBeReported, s"duplicate attachment for key $key") next = new Link(key, value, next) - } final def removeAllAttachments(): Unit = next = null - } -} diff --git a/compiler/src/dotty/tools/dotc/util/Chars.scala b/compiler/src/dotty/tools/dotc/util/Chars.scala index cde1a63f5293..c518cdbe000a 100644 --- a/compiler/src/dotty/tools/dotc/util/Chars.scala +++ b/compiler/src/dotty/tools/dotc/util/Chars.scala @@ -18,7 +18,7 @@ object Chars: /** Convert a character digit to an Int according to given base, * -1 if no success */ - def digit2int(ch: Char, base: Int): Int = { + def digit2int(ch: Char, base: Int): Int = val num = ( if (ch <= '9') ch - '0' else if ('a' <= ch && ch <= 'z') ch - 'a' + 10 @@ -26,12 +26,11 @@ object Chars: else -1 ) if (0 <= num && num < base) num else -1 - } /** Buffer for creating '\ u XXXX' strings. */ private[this] val char2uescapeArray = Array[Char]('\\', 'u', 0, 0, 0, 0) /** Convert a character to a backslash-u escape */ - def char2uescape(c: Char): String = { + def char2uescape(c: Char): String = inline def hexChar(ch: Int): Char = (( if (ch < 10) '0' else 'A' - 10 ) + ch).toChar @@ -41,13 +40,11 @@ object Chars: char2uescapeArray(5) = hexChar((c ) % 16) new String(char2uescapeArray) - } /** Is character a line break? 
*/ - def isLineBreakChar(c: Char): Boolean = (c: @switch) match { + def isLineBreakChar(c: Char): Boolean = (c: @switch) match case LF|FF|CR|SU => true case _ => false - } /** Is character a whitespace character (but not a new line)? */ def isWhitespace(c: Char): Boolean = @@ -66,14 +63,12 @@ object Chars: def isIdentifierPart(c: CodePoint) = (c == '$') || isUnicodeIdentifierPart(c) /** Is character a math or other symbol in Unicode? */ - def isSpecial(c: Char): Boolean = { + def isSpecial(c: Char): Boolean = val chtp = Character.getType(c) chtp == MATH_SYMBOL.toInt || chtp == OTHER_SYMBOL.toInt - } - def isSpecial(codePoint: CodePoint) = { + def isSpecial(codePoint: CodePoint) = val chtp = Character.getType(codePoint) chtp == MATH_SYMBOL.toInt || chtp == OTHER_SYMBOL.toInt - } def isValidJVMChar(c: Char): Boolean = !(c == '.' || c == ';' || c =='[' || c == '/') @@ -82,31 +77,27 @@ object Chars: !(c == '.' || c == ';' || c =='[' || c == '/' || c == '<' || c == '>') def isScalaLetter(c: Char): Boolean = - Character.getType(c: @switch) match { + Character.getType(c: @switch) match case LOWERCASE_LETTER | UPPERCASE_LETTER | OTHER_LETTER | TITLECASE_LETTER | LETTER_NUMBER => true case _ => c == '$' || c == '_' - } def isScalaLetter(c: CodePoint): Boolean = - Character.getType(c: @switch) match { + Character.getType(c: @switch) match case LOWERCASE_LETTER | UPPERCASE_LETTER | OTHER_LETTER | TITLECASE_LETTER | LETTER_NUMBER => true case _ => c == '$' || c == '_' - } /** Can character form part of a Scala operator name? */ - def isOperatorPart(c: Char): Boolean = (c: @switch) match { + def isOperatorPart(c: Char): Boolean = (c: @switch) match case '~' | '!' | '@' | '#' | '%' | '^' | '*' | '+' | '-' | '<' | '>' | '?' | ':' | '=' | '&' | '|' | '/' | '\\' => true case c => isSpecial(c) - } - def isOperatorPart(c: CodePoint): Boolean = (c: @switch) match { + def isOperatorPart(c: CodePoint): Boolean = (c: @switch) match case '~' | '!' 
| '@' | '#' | '%' | '^' | '*' | '+' | '-' | '<' | '>' | '?' | ':' | '=' | '&' | '|' | '/' | '\\' => true case c => isSpecial(c) - } /** Would the character be encoded by `NameTransformer.encode`? */ def willBeEncoded(c: Char): Boolean = !isJavaIdentifierPart(c) diff --git a/compiler/src/dotty/tools/dotc/util/ClasspathFromClassloader.scala b/compiler/src/dotty/tools/dotc/util/ClasspathFromClassloader.scala index 25def103083d..b79845036511 100644 --- a/compiler/src/dotty/tools/dotc/util/ClasspathFromClassloader.scala +++ b/compiler/src/dotty/tools/dotc/util/ClasspathFromClassloader.scala @@ -7,18 +7,18 @@ import java.nio.file.Paths import dotty.tools.repl.AbstractFileClassLoader -object ClasspathFromClassloader { +object ClasspathFromClassloader: /** Attempt to recreate a classpath from a classloader. * * BEWARE: with exotic enough classloaders, this may not work at all or do * the wrong thing. */ - def apply(cl: ClassLoader): String = { + def apply(cl: ClassLoader): String = val classpathBuff = List.newBuilder[String] - def collectClassLoaderPaths(cl: ClassLoader): Unit = { - if (cl != null) { - cl match { + def collectClassLoaderPaths(cl: ClassLoader): Unit = + if (cl != null) + cl match case cl: URLClassLoader => // This is wrong if we're in a subclass of URLClassLoader // that filters loading classes from its parent ¯\_(ツ)_/¯ @@ -44,10 +44,5 @@ object ClasspathFromClassloader { // HACK: For Java 9+, if the classloader is an AppClassLoader then use the classpath from the system // property `java.class.path`. 
classpathBuff += System.getProperty("java.class.path") - } - } - } collectClassLoaderPaths(cl) classpathBuff.result().mkString(java.io.File.pathSeparator) - } -} diff --git a/compiler/src/dotty/tools/dotc/util/CommentParsing.scala b/compiler/src/dotty/tools/dotc/util/CommentParsing.scala index b4af59c09310..302826b6b177 100644 --- a/compiler/src/dotty/tools/dotc/util/CommentParsing.scala +++ b/compiler/src/dotty/tools/dotc/util/CommentParsing.scala @@ -16,7 +16,7 @@ import scala.collection.mutable * `@define` annotations. The rest of the comment is untouched and later * handled by scaladoc. */ -object CommentParsing { +object CommentParsing: import Chars._ /** Returns index of string `str` following `start` skipping longest @@ -48,13 +48,12 @@ object CommentParsing { */ def skipLineLead(str: String, start: Int): Int = if (start == str.length) start - else { + else val idx = skipWhitespace(str, start + 1) if (idx < str.length && (str charAt idx) == '*') skipWhitespace(str, idx + 1) else if (idx + 2 < str.length && (str charAt idx) == '/' && (str charAt (idx + 1)) == '*' && (str charAt (idx + 2)) == '*') skipWhitespace(str, idx + 3) else idx - } /** Skips to next occurrence of `\n` or to the position after the `/``**` sequence following index `start`. */ @@ -66,20 +65,18 @@ object CommentParsing { /** Returns first index following `start` and starting a line (i.e. after skipLineLead) or starting the comment * which satisfies predicate `p`. */ - def findNext(str: String, start: Int)(p: Int => Boolean): Int = { + def findNext(str: String, start: Int)(p: Int => Boolean): Int = val idx = skipLineLead(str, skipToEol(str, start)) if (idx < str.length && !p(idx)) findNext(str, idx)(p) else idx - } /** Return first index following `start` and starting a line (i.e. after skipLineLead) * which satisfies predicate `p`. 
*/ - def findAll(str: String, start: Int)(p: Int => Boolean): List[Int] = { + def findAll(str: String, start: Int)(p: Int => Boolean): List[Int] = val idx = findNext(str, start)(p) if (idx == str.length) List() else idx :: findAll(str, idx)(p) - } /** Produces a string index, which is a list of `sections`, i.e * pairs of start/end positions of all tagged sections in the string. @@ -91,30 +88,27 @@ object CommentParsing { * usecase or the end of the string, as they might include other sections * of their own */ - def tagIndex(str: String, p: Int => Boolean = (idx => true)): List[(Int, Int)] = { + def tagIndex(str: String, p: Int => Boolean = (idx => true)): List[(Int, Int)] = var indices = findAll(str, 0) (idx => str(idx) == '@' && p(idx)) indices = mergeUsecaseSections(str, indices) indices = mergeInheritdocSections(str, indices) - indices match { + indices match case List() => List() case idxs => idxs zip (idxs.tail ::: List(str.length - 2)) - } - } /** * Merge sections following an usecase into the usecase comment, so they * can override the parent symbol's sections */ def mergeUsecaseSections(str: String, idxs: List[Int]): List[Int] = - idxs.indexWhere(str.startsWith("@usecase", _)) match { + idxs.indexWhere(str.startsWith("@usecase", _)) match case firstUCIndex if firstUCIndex != -1 => val commentSections = idxs.take(firstUCIndex) val usecaseSections = idxs.drop(firstUCIndex).filter(str.startsWith("@usecase", _)) commentSections ::: usecaseSections case _ => idxs - } /** * Merge the inheritdoc sections, as they never make sense on their own @@ -133,21 +127,18 @@ object CommentParsing { /** The first start tag of a list of tag intervals, * or the end of the whole comment string - 2 if list is empty */ - def startTag(str: String, sections: List[(Int, Int)]): Int = sections match { + def startTag(str: String, sections: List[(Int, Int)]): Int = sections match case Nil => str.length - 2 case (start, _) :: _ => start - } /** A map from parameter names to start/end 
indices describing all parameter * sections in `str` tagged with `tag`, where `sections` is the index of `str`. */ def paramDocs(str: String, tag: String, sections: List[(Int, Int)]): Map[String, (Int, Int)] = - Map() ++ { - for (section <- sections if startsWithTag(str, section, tag)) yield { + Map() `++`: + for (section <- sections if startsWithTag(str, section, tag)) yield val start = skipWhitespace(str, section._1 + tag.length) str.substring(start, skipIdent(str, start)) -> section - } - } /** Optionally start and end index of return section in `str`, or `None` * if `str` does not have a @group. */ @@ -170,36 +161,32 @@ object CommentParsing { /** Returns index following variable, or start index if no variable was recognized */ - def skipVariable(str: String, start: Int): Int = { + def skipVariable(str: String, start: Int): Int = var idx = start - if (idx < str.length && (str charAt idx) == '{') { + if (idx < str.length && (str charAt idx) == '{') while ({ idx += 1 idx < str.length && (str charAt idx) != '}' }) () if (idx < str.length) idx + 1 else start - } - else { + else while (idx < str.length && isVarPart(str charAt idx)) idx += 1 idx - } - } /** A map from the section tag to section parameters */ def sectionTagMap(str: String, sections: List[(Int, Int)]): Map[String, (Int, Int)] = - Map() ++ { + Map() `++`: for (section <- sections) yield extractSectionTag(str, section) -> section - } /** Extract the section tag, treating the section tag as an identifier */ def extractSectionTag(str: String, section: (Int, Int)): String = str.substring(section._1, skipTag(str, section._1)) /** Extract the section parameter */ - def extractSectionParam(str: String, section: (Int, Int)): String = { + def extractSectionParam(str: String, section: (Int, Int)): String = val (beg, _) = section assert(str.startsWith("@param", beg) || str.startsWith("@tparam", beg) || @@ -209,10 +196,9 @@ object CommentParsing { val finish = skipIdent(str, start) str.substring(start, finish) - } 
/** Extract the section text, except for the tag and comment newlines */ - def extractSectionText(str: String, section: (Int, Int)): (Int, Int) = { + def extractSectionText(str: String, section: (Int, Int)): (Int, Int) = val (beg, end) = section if (str.startsWith("@param", beg) || str.startsWith("@tparam", beg) || @@ -220,27 +206,24 @@ object CommentParsing { (skipWhitespace(str, skipIdent(str, skipWhitespace(str, skipTag(str, beg)))), end) else (skipWhitespace(str, skipTag(str, beg)), end) - } /** Cleanup section text */ - def cleanupSectionText(str: String): String = { + def cleanupSectionText(str: String): String = var result = str.trim.replaceAll("\n\\s+\\*\\s+", " \n") while (result.endsWith("\n")) result = result.substring(0, str.length - 1) result - } /** A map from tag name to all boundaries for this tag */ - def groupedSections(str: String, sections: List[(Int, Int)]): Map[String, List[(Int, Int)]] = { + def groupedSections(str: String, sections: List[(Int, Int)]): Map[String, List[(Int, Int)]] = val map = mutable.Map.empty[String, List[(Int, Int)]].withDefaultValue(Nil) sections.reverse.foreach { bounds => val tag = extractSectionTag(str, bounds) map.update(tag, (skipTag(str, bounds._1), bounds._2) :: map(tag)) } map.toMap - } - def removeSections(raw: String, xs: String*): String = { + def removeSections(raw: String, xs: String*): String = val sections = tagIndex(raw) val toBeRemoved = for { @@ -252,5 +235,3 @@ object CommentParsing { val end = startTag(raw, toBeRemoved.flatten.sortBy(_._1).toList) if (end == raw.length - 2) raw else raw.substring(0, end) + "*/" - } -} diff --git a/compiler/src/dotty/tools/dotc/util/DiffUtil.scala b/compiler/src/dotty/tools/dotc/util/DiffUtil.scala index cec86fa84443..906edc2fd833 100644 --- a/compiler/src/dotty/tools/dotc/util/DiffUtil.scala +++ b/compiler/src/dotty/tools/dotc/util/DiffUtil.scala @@ -5,7 +5,7 @@ import scala.language.unsafeNulls import scala.annotation.tailrec import scala.collection.mutable -object 
DiffUtil { +object DiffUtil: val EOF: String = new String("EOF") // Unique string up to reference @@ -14,13 +14,12 @@ object DiffUtil { @tailrec private def splitTokens(str: String, acc: List[String] = Nil): List[String] = if (str == "") acc.reverse - else { + else val head = str.charAt(0) val (token, rest) = - if (head == ansiColorToken) { // ansi color token + if (head == ansiColorToken) // ansi color token val splitIndex = str.indexOf('m') + 1 (str.substring(0, splitIndex), str.substring(splitIndex)) - } else if (Character.isAlphabetic(head) || Character.isDigit(head)) str.span(c => Character.isAlphabetic(c) || Character.isDigit(c) && c != ansiColorToken) else if (Character.isMirrored(head) || Character.isWhitespace(head)) @@ -31,11 +30,10 @@ object DiffUtil { !Character.isMirrored(c) && !Character.isWhitespace(c) && c != ansiColorToken } splitTokens(rest, token :: acc) - } /** @return a tuple of the (found, expected, changedPercentage) diffs as strings */ - def mkColoredTypeDiff(found: String, expected: String): (String, String, Double) = { + def mkColoredTypeDiff(found: String, expected: String): (String, String, Double) = var totalChange = 0 val foundTokens = splitTokens(found, Nil).toArray val expectedTokens = splitTokens(expected, Nil).toArray @@ -58,7 +56,6 @@ object DiffUtil { }.mkString (fnd, exp, totalChange.toDouble / (expected.length + found.length)) - } /** * Return a colored diff between the tokens of every line in `expected` and `actual`. Each line of @@ -69,19 +66,17 @@ object DiffUtil { * @return A string with one element of `expected` and `actual` on each lines, where * differences are highlighted. 
*/ - def mkColoredLineDiff(expected: Seq[String], actual: Seq[String]): String = { + def mkColoredLineDiff(expected: Seq[String], actual: Seq[String]): String = val expectedSize = EOF.length max expected.maxBy(_.length).length actual.padTo(expected.length, "").zip(expected.padTo(actual.length, "")).map { case (act, exp) => mkColoredLineDiff(exp, act, expectedSize) }.mkString(System.lineSeparator) - } - def mkColoredLineDiff(expected: String, actual: String, expectedSize: Int): String = { - lazy val diff = { + def mkColoredLineDiff(expected: String, actual: String, expectedSize: Int): String = + lazy val diff = val tokens = splitTokens(expected, Nil).toArray val lastTokens = splitTokens(actual, Nil).toArray hirschberg(lastTokens, tokens) - } val expectedDiff = if (expected eq EOF) eof() @@ -104,9 +99,8 @@ object DiffUtil { val pad = " " * 0.max(expectedSize - expected.length) expectedDiff + pad + " | " + actualDiff - } - def mkColoredCodeDiff(code: String, lastCode: String, printDiffDel: Boolean): String = { + def mkColoredCodeDiff(code: String, lastCode: String, printDiffDel: Boolean): String = val tokens = splitTokens(code, Nil).toArray val lastTokens = splitTokens(lastCode, Nil).toArray @@ -119,20 +113,17 @@ object DiffUtil { case Modified(_, str) => added(str) case Deleted(str) if printDiffDel => deleted(str) }.mkString - } private def added(str: String): String = bgColored(str, Console.GREEN) private def deleted(str: String) = bgColored(str, Console.RED) private def bgColored(str: String, color: String): String = if (str.isEmpty) "" - else { + else val (spaces, rest) = str.span(_ == '\n') - if (spaces.isEmpty) { + if (spaces.isEmpty) val (text, rest2) = str.span(_ != '\n') Console.BOLD + color + text + Console.RESET + bgColored(rest2, color) - } else spaces + bgColored(rest, color) - } private def eof() = "\u001B[51m" + "EOF" + Console.RESET private sealed trait Patch @@ -141,7 +132,7 @@ object DiffUtil { private final case class Deleted(str: String) extends 
Patch private final case class Inserted(str: String) extends Patch - private def hirschberg(a: Array[String], b: Array[String]): Array[Patch] = { + private def hirschberg(a: Array[String], b: Array[String]): Array[Patch] = def build(x: Array[String], y: Array[String], builder: mutable.ArrayBuilder[Patch]): Unit = if (x.isEmpty) builder += Inserted(y.mkString) @@ -149,7 +140,7 @@ object DiffUtil { builder += Deleted(x.mkString) else if (x.length == 1 || y.length == 1) needlemanWunsch(x, y, builder) - else { + else val xlen = x.length val xmid = xlen / 2 val ylen = y.length @@ -157,22 +148,19 @@ object DiffUtil { val (x1, x2) = x.splitAt(xmid) val leftScore = nwScore(x1, y) val rightScore = nwScore(x2.reverse, y.reverse) - val scoreSum = (leftScore zip rightScore.reverse).map { + val scoreSum = (leftScore zip rightScore.reverse).map: case (left, right) => left + right - } val max = scoreSum.max val ymid = scoreSum.indexOf(max) val (y1, y2) = y.splitAt(ymid) build(x1, y1, builder) build(x2, y2, builder) - } val builder = Array.newBuilder[Patch] build(a, b, builder) builder.result() - } - private def nwScore(x: Array[String], y: Array[String]): Array[Int] = { + private def nwScore(x: Array[String], y: Array[String]): Array[Int] = def ins(s: String) = -2 def del(s: String) = -2 def sub(s1: String, s2: String) = if (s1 == s2) 2 else -1 @@ -180,19 +168,16 @@ object DiffUtil { val score = Array.fill(x.length + 1, y.length + 1)(0) for (j <- 1 to y.length) score(0)(j) = score(0)(j - 1) + ins(y(j - 1)) - for (i <- 1 to x.length) { + for (i <- 1 to x.length) score(i)(0) = score(i - 1)(0) + del(x(i - 1)) - for (j <- 1 to y.length) { + for (j <- 1 to y.length) val scoreSub = score(i - 1)(j - 1) + sub(x(i - 1), y(j - 1)) val scoreDel = score(i - 1)(j) + del(x(i - 1)) val scoreIns = score(i)(j - 1) + ins(y(j - 1)) score(i)(j) = scoreSub max scoreDel max scoreIns - } - } Array.tabulate(y.length + 1)(j => score(x.length)(j)) - } - private def needlemanWunsch(x: Array[String], y: 
Array[String], builder: mutable.ArrayBuilder[Patch]): Unit = { + private def needlemanWunsch(x: Array[String], y: Array[String], builder: mutable.ArrayBuilder[Patch]): Unit = def similarity(a: String, b: String) = if (a == b) 3 else -1 val d = 1 val score = Array.tabulate(x.length + 1, y.length + 1) { (i, j) => @@ -201,34 +186,28 @@ object DiffUtil { else 0 } for (i <- 1 to x.length) - for (j <- 1 to y.length) { + for (j <- 1 to y.length) val mtch = score(i - 1)(j - 1) + similarity(x(i - 1), y(j - 1)) val delete = score(i - 1)(j) + d val insert = score(i)(j - 1) + d score(i)(j) = mtch max insert max delete - } var alignment = List.empty[Patch] var i = x.length var j = y.length while (i > 0 || j > 0) - if (i > 0 && j > 0 && score(i)(j) == score(i - 1)(j - 1) + similarity(x(i - 1), y(j - 1))) { + if (i > 0 && j > 0 && score(i)(j) == score(i - 1)(j - 1) + similarity(x(i - 1), y(j - 1))) val newHead = if (x(i - 1) == y(j - 1)) Unmodified(x(i - 1)) else Modified(x(i - 1), y(j - 1)) alignment = newHead :: alignment i = i - 1 j = j - 1 - } - else if (i > 0 && score(i)(j) == score(i - 1)(j) + d) { + else if (i > 0 && score(i)(j) == score(i - 1)(j) + d) alignment = Deleted(x(i - 1)) :: alignment i = i - 1 - } - else { + else alignment = Inserted(y(j - 1)) :: alignment j = j - 1 - } builder ++= alignment - } -} diff --git a/compiler/src/dotty/tools/dotc/util/FreshNameCreator.scala b/compiler/src/dotty/tools/dotc/util/FreshNameCreator.scala index f3375028c95f..f451b2694b38 100644 --- a/compiler/src/dotty/tools/dotc/util/FreshNameCreator.scala +++ b/compiler/src/dotty/tools/dotc/util/FreshNameCreator.scala @@ -7,12 +7,11 @@ import core.Names.TermName import core.NameKinds.UniqueNameKind import core.StdNames.str -abstract class FreshNameCreator { +abstract class FreshNameCreator: def newName(prefix: TermName, unique: UniqueNameKind): TermName -} -object FreshNameCreator { - class Default extends FreshNameCreator { +object FreshNameCreator: + class Default extends 
FreshNameCreator: protected var counter: Int = 0 protected val counters: mutable.Map[String, Int] = mutable.AnyRefMap() withDefaultValue 0 @@ -21,10 +20,7 @@ object FreshNameCreator { * that the returned name has never been returned by a previous * call to this function (provided the prefix does not end in a digit). */ - def newName(prefix: TermName, unique: UniqueNameKind): TermName = { + def newName(prefix: TermName, unique: UniqueNameKind): TermName = val key = str.sanitize(prefix.toString) + unique.separator counters(key) += 1 prefix.derived(unique.NumberedInfo(counters(key))) - } - } -} diff --git a/compiler/src/dotty/tools/dotc/util/HashSet.scala b/compiler/src/dotty/tools/dotc/util/HashSet.scala index a6e1532c804f..471a4ee50270 100644 --- a/compiler/src/dotty/tools/dotc/util/HashSet.scala +++ b/compiler/src/dotty/tools/dotc/util/HashSet.scala @@ -26,7 +26,7 @@ object HashSet: * However, a table of size up to DenseLimit will be re-sized only * once the number of elements reaches the table's size. */ -class HashSet[T](initialCapacity: Int = 8, capacityMultiple: Int = 2) extends MutableSet[T] { +class HashSet[T](initialCapacity: Int = 8, capacityMultiple: Int = 2) extends MutableSet[T]: import HashSet.DenseLimit private var used: Int = _ @@ -186,4 +186,3 @@ class HashSet[T](initialCapacity: Int = 8, capacityMultiple: Int = 2) extends Mu val prefix = if isDense then "HashSet(dense)." else "HashSet." val suffix = getClass.getSimpleName s"$prefix$op $suffix" -} diff --git a/compiler/src/dotty/tools/dotc/util/LRUCache.scala b/compiler/src/dotty/tools/dotc/util/LRUCache.scala index 99ee8a80227b..f2dcb6a9950b 100644 --- a/compiler/src/dotty/tools/dotc/util/LRUCache.scala +++ b/compiler/src/dotty/tools/dotc/util/LRUCache.scala @@ -17,7 +17,7 @@ import annotation.tailrec * get promoted to be first in the queue. Elements are evicted * at the `last` position. 
*/ -class LRUCache[Key >: Null <: AnyRef : ClassTag, Value >: Null: ClassTag] { +class LRUCache[Key >: Null <: AnyRef : ClassTag, Value >: Null: ClassTag]: import LRUCache._ val keys: Array[Key] = new Array[Key](Retained) val values: Array[Value] = new Array(Retained) @@ -31,29 +31,24 @@ class LRUCache[Key >: Null <: AnyRef : ClassTag, Value >: Null: ClassTag] { * As a side effect, sets `lastButOne` to the element before `last` * if key was not found. */ - def lookup(key: Key): Value = { + def lookup(key: Key): Value = @tailrec - def lookupNext(prev: Int, current: Int, nx: SixteenNibbles): Value = { + def lookupNext(prev: Int, current: Int, nx: SixteenNibbles): Value = val follow = nx(current) - if (keys(current) eq key) { + if (keys(current) eq key) // arrange so that found element is at position `first`. if (current == last) last = prev - else if (prev != last) { + else if (prev != last) next = next.updated(prev, follow) next = next.updated(current, first) next = next.updated(last, current) - } values(current) - } - else if (current == last) { + else if (current == last) lastButOne = prev null - } else lookupNext(current, follow, nx) - } lookupNext(last, first, next) - } /** Enter key/value in cache at position `last`. * As a side effect, sets `last` to `lastButOne`. @@ -62,37 +57,33 @@ class LRUCache[Key >: Null <: AnyRef : ClassTag, Value >: Null: ClassTag] { * first in the queue. If there was no preceding lookup, the element * is inserted at a random position in the queue. */ - def enter(key: Key, value: Value): Unit = { + def enter(key: Key, value: Value): Unit = keys(last) = key values(last) = value last = lastButOne - } /** Invalidate key. The invalidated element becomes * the last in the queue. 
*/ def invalidate(key: Key): Unit = - if (lookup(key) != null) { + if (lookup(key) != null) keys(first) = null last = first - } def indices: Iterator[Int] = Iterator.iterate(first)(next.apply) def keysIterator: Iterator[Key] = indices take Retained map keys filter (_ != null) - override def toString: String = { + override def toString: String = val assocs = keysIterator .toList // double reverse so that lookups do not perturb order .reverse .map(key => s"$key -> ${lookup(key)}") .reverse s"LRUCache(${assocs.mkString(", ")})" - } -} -object LRUCache { +object LRUCache: /** The number of retained elements in the cache; must be at most 16. */ val Retained: Int = 16 @@ -101,4 +92,3 @@ object LRUCache { val initialRing: SixteenNibbles = (0 until Retained).foldLeft(new SixteenNibbles(0L))((nibbles, idx) => nibbles.updated(idx, (idx + 1) % Retained)) -} diff --git a/compiler/src/dotty/tools/dotc/util/NameTransformer.scala b/compiler/src/dotty/tools/dotc/util/NameTransformer.scala index 61cf238fbc7f..31755d4d8da5 100644 --- a/compiler/src/dotty/tools/dotc/util/NameTransformer.scala +++ b/compiler/src/dotty/tools/dotc/util/NameTransformer.scala @@ -10,7 +10,7 @@ import scala.annotation.internal.sharable /** Provides functions to encode and decode Scala symbolic names. */ -object NameTransformer { +object NameTransformer: private val nops = 128 private val ncodes = 26 * 26 @@ -19,11 +19,10 @@ object NameTransformer { @sharable private val op2code = new Array[String](nops) @sharable private val code2op = new Array[OpCodes](ncodes) - private def enterOp(op: Char, code: String) = { + private def enterOp(op: Char, code: String) = op2code(op.toInt) = code val c = (code.charAt(1) - 'a') * 26 + code.charAt(2) - 'a' code2op(c.toInt) = new OpCodes(op, code, code2op(c)) - } /* Note: decoding assumes opcodes are only ever lowercase. 
*/ enterOp('~', "$tilde") @@ -54,98 +53,80 @@ object NameTransformer { * `name`, this is considered acceptable since '$' is a reserved character in * the Scala spec as well as the Java spec. */ - def encode(name: SimpleName): SimpleName = { + def encode(name: SimpleName): SimpleName = var buf: StringBuilder = null val len = name.length var i = 0 - while (i < len) { + while (i < len) val c = name(i) - if (c < nops && (op2code(c.toInt) ne null)) { - if (buf eq null) { + if (c < nops && (op2code(c.toInt) ne null)) + if (buf eq null) buf = new StringBuilder() buf.append(name.sliceToString(0, i)) - } buf.append(op2code(c.toInt)) /* Handle glyphs that are not valid Java/JVM identifiers */ - } - else if (!Character.isJavaIdentifierPart(c)) { - if (buf eq null) { + else if (!Character.isJavaIdentifierPart(c)) + if (buf eq null) buf = new StringBuilder() buf.append(name.sliceToString(0, i)) - } buf.append("$u%04X".format(c.toInt)) - } - else if (buf ne null) { + else if (buf ne null) buf.append(c) - } i += 1 - } if (buf eq null) name else termName(buf.toString) - } /** Replace operator expansions by the operators themselves, * and decode `$u....` expansions into unicode characters. 
*/ - def decode(name: SimpleName): SimpleName = { + def decode(name: SimpleName): SimpleName = //System.out.println("decode: " + name);//DEBUG var buf: StringBuilder = null val len = name.length var i = 0 - while (i < len) { + while (i < len) var ops: OpCodes = null var unicode = false val c = name(i) - if (c == '$' && i + 2 < len) { + if (c == '$' && i + 2 < len) val ch1 = name(i + 1) - if ('a' <= ch1 && ch1 <= 'z') { + if ('a' <= ch1 && ch1 <= 'z') val ch2 = name(i + 2) - if ('a' <= ch2 && ch2 <= 'z') { + if ('a' <= ch2 && ch2 <= 'z') ops = code2op((ch1 - 'a') * 26 + ch2 - 'a') while ((ops ne null) && !name.startsWith(ops.code, i)) ops = ops.next - if (ops ne null) { - if (buf eq null) { + if (ops ne null) + if (buf eq null) buf = new StringBuilder() buf.append(name.sliceToString(0, i)) - } buf.append(ops.op) i += ops.code.length() - } /* Handle the decoding of Unicode glyphs that are * not valid Java/JVM identifiers */ - } else if ((len - i) >= 6 && // Check that there are enough characters left - ch1 == 'u' && + else if ((len - i) >= 6 && // Check that there are enough characters left + ch1 == 'u' && ((Character.isDigit(ch2)) || - ('A' <= ch2 && ch2 <= 'F'))) { + ('A' <= ch2 && ch2 <= 'F'))) /* Skip past "$u", next four should be hexadecimal */ val hex = name.sliceToString(i+2, i+6) - try { + try val str = Integer.parseInt(hex, 16).toChar - if (buf eq null) { + if (buf eq null) buf = new StringBuilder() buf.append(name.sliceToString(0, i)) - } buf.append(str) /* 2 for "$u", 4 for hexadecimal number */ i += 6 unicode = true - } catch { + catch case _:NumberFormatException => /* `hex` did not decode to a hexadecimal number, so * do nothing. 
*/ - } - } - } - } /* If we didn't see an opcode or encoded Unicode glyph, and the buffer is non-empty, write the current character and advance one */ - if ((ops eq null) && !unicode) { + if ((ops eq null) && !unicode) if (buf ne null) buf.append(c) i += 1 - } - } //System.out.println("= " + (if (buf == null) name else buf.toString()));//DEBUG if (buf eq null) name else termName(buf.toString) - } -} diff --git a/compiler/src/dotty/tools/dotc/util/ParsedComment.scala b/compiler/src/dotty/tools/dotc/util/ParsedComment.scala index ac724f7e336f..a59ebb9c67ee 100644 --- a/compiler/src/dotty/tools/dotc/util/ParsedComment.scala +++ b/compiler/src/dotty/tools/dotc/util/ParsedComment.scala @@ -16,7 +16,7 @@ import scala.collection.immutable.ListMap * * @param comment The doc comment to parse */ -class ParsedComment(val comment: Comment) { +class ParsedComment(val comment: Comment): /** * The bounds of a section that represents the [start; end[ char offset @@ -40,13 +40,11 @@ class ParsedComment(val comment: Comment) { /** * The "main" documentation for this comment. That is, the comment before any section starts. */ - lazy val mainDoc: String = { - val doc = tagIndex match { + lazy val mainDoc: String = + val doc = tagIndex match case Nil => content.stripSuffix("*/") case (start, _) :: _ => content.slice(0, start) - } clean(doc.stripPrefix("/**")) - } /** * Renders this comment as markdown. @@ -93,9 +91,8 @@ class ParsedComment(val comment: Comment) { * @return The cleaned string. */ private def clean(str: String): String = str.stripMargin('*').trim -} -object ParsedComment { +object ParsedComment: /** * Return the `ParsedComment` associated with `symbol`, if it exists. @@ -103,14 +100,13 @@ object ParsedComment { * @param symbol The symbol for which to retrieve the documentation * @return If it exists, the `ParsedComment` for `symbol`. 
*/ - def docOf(symbol: Symbol)(using Context): Option[ParsedComment] = { + def docOf(symbol: Symbol)(using Context): Option[ParsedComment] = val documentedSymbol = if (symbol.isPrimaryConstructor) symbol.owner else symbol for { docCtx <- ctx.docCtx comment <- docCtx.docstring(documentedSymbol) } yield new ParsedComment(comment) - } @scala.annotation.internal.sharable private val prefixRegex = """@param\s+\w+\s+""".r @@ -138,13 +134,12 @@ object ParsedComment { * @param items The items to format into a list. * @return A markdown list of descriptions. */ - private def toDescriptionList(ctx: Context, items: List[String]): String = inContext(ctx) { + private def toDescriptionList(ctx: Context, items: List[String]): String = inContext(ctx): val formattedItems = items.map { p => val name :: rest = p.split(" ", 2).toList: @unchecked s"${bold(name)} ${rest.mkString("").trim}" } toMarkdownList(ctx, formattedItems) - } /** * Formats a list of items into a markdown list. @@ -152,10 +147,9 @@ object ParsedComment { * @param items The items to put in a list. * @return The list of items, in markdown. */ - private def toMarkdownList(ctx: Context, items: List[String]): String = { + private def toMarkdownList(ctx: Context, items: List[String]): String = val formattedItems = items.map(_.linesIterator.mkString(System.lineSeparator + " ")) formattedItems.mkString(" - ", System.lineSeparator + " - ", "") - } /** * If the color is enabled, add syntax highlighting to each of `snippets`, otherwise wrap each @@ -178,14 +172,13 @@ object ParsedComment { * @param snippet The code snippet * @return `snippet`, wrapped in a code fence. 
*/ - private def toCodeFence(language: String)(ctx: Context, snippet: String): String = inContext(ctx) { + private def toCodeFence(language: String)(ctx: Context, snippet: String): String = inContext(ctx): if colorEnabled then SyntaxHighlighting.highlight(snippet) else s"""```$language |$snippet |```""".stripMargin - } /** * Format the elements of documentation associated with a given tag using `fn`, and starts the @@ -194,7 +187,7 @@ object ParsedComment { * @param title The title to give to the formatted items. * @param fn The formatting function to use. */ - private case class TagFormatter(title: String, fn: (Context, List[String]) => String) { + private case class TagFormatter(title: String, fn: (Context, List[String]) => String): /** * Format `item` using `fn` if `items` is not empty. @@ -202,15 +195,13 @@ object ParsedComment { * @param items The items to format * @return If items is not empty, the items formatted using `fn`. */ - def apply(items: List[String])(using Context): Option[String] = items match { + def apply(items: List[String])(using Context): Option[String] = items match case Nil => None case items => Some(s"""${bold(title)} |${fn(ctx, items)} |""".stripMargin) - } - } /** Is the color enabled in the context? 
*/ private def colorEnabled(using Context): Boolean = @@ -220,5 +211,4 @@ object ParsedComment { private def bold(str: String)(using Context): String = if (colorEnabled) s"$BOLD$str$RESET" else s"**$str**" -} diff --git a/compiler/src/dotty/tools/dotc/util/Property.scala b/compiler/src/dotty/tools/dotc/util/Property.scala index ebcd43ae5a11..bad89999ba5c 100644 --- a/compiler/src/dotty/tools/dotc/util/Property.scala +++ b/compiler/src/dotty/tools/dotc/util/Property.scala @@ -3,7 +3,7 @@ package dotty.tools.dotc.util /** Defines a key type with which to tag properties, such as attachments * or context properties */ -object Property { +object Property: /** The class of keys for properties of type V */ class Key[+V] @@ -15,4 +15,3 @@ object Property { * is copied. */ class StickyKey[+V] extends Key[V] -} diff --git a/compiler/src/dotty/tools/dotc/util/ReusableInstance.scala b/compiler/src/dotty/tools/dotc/util/ReusableInstance.scala index 4dd897dd082a..5ec12dd76834 100644 --- a/compiler/src/dotty/tools/dotc/util/ReusableInstance.scala +++ b/compiler/src/dotty/tools/dotc/util/ReusableInstance.scala @@ -14,20 +14,17 @@ import scala.util.chaining._ * * Ported from scala.reflect.internal.util.ReusableInstance */ -final class ReusableInstance[T <: AnyRef] private (make: => T) { +final class ReusableInstance[T <: AnyRef] private (make: => T): private[this] val cache = new ArrayBuffer[T](ReusableInstance.InitialSize).tap(_.addOne(make)) private[this] var taken = 0 - inline def withInstance[R](action: T => R): R ={ + inline def withInstance[R](action: T => R): R = if (taken == cache.size) cache += make taken += 1 try action(cache(taken-1)) finally taken -= 1 - } -} -object ReusableInstance { +object ReusableInstance: private inline val InitialSize = 4 def apply[T <: AnyRef](make: => T): ReusableInstance[T] = new ReusableInstance[T](make) -} diff --git a/compiler/src/dotty/tools/dotc/util/ShowPickled.scala b/compiler/src/dotty/tools/dotc/util/ShowPickled.scala index 
f85a57a8f812..2571d94f68c8 100644 --- a/compiler/src/dotty/tools/dotc/util/ShowPickled.scala +++ b/compiler/src/dotty/tools/dotc/util/ShowPickled.scala @@ -10,15 +10,14 @@ import java.lang.Double.longBitsToDouble import core.unpickleScala2.PickleBuffer import core.Names._ -object ShowPickled { +object ShowPickled: import core.unpickleScala2.PickleFormat._ - case class PickleBufferEntry(num: Int, startIndex: Int, tag: Int, bytes: Array[Byte]) { + case class PickleBufferEntry(num: Int, startIndex: Int, tag: Int, bytes: Array[Byte]): def isName: Boolean = tag == TERMname || tag == TYPEname - def hasName: Boolean = tag match { + def hasName: Boolean = tag match case TYPEsym | ALIASsym | CLASSsym | MODULEsym | VALsym | EXTref | EXTMODCLASSref => true case _ => false - } def readName: String = if (isName) new String(bytes, StandardCharsets.UTF_8) else sys.error("%s is no name" format tagName) @@ -28,26 +27,21 @@ object ShowPickled { def tagName: String = tag2string(tag) override def toString: String = "%d,%d: %s".format(num, startIndex, tagName) - } - case class PickleBufferEntryList(entries: IndexedSeq[PickleBufferEntry]) { - def nameAt(idx: Int): String = { + case class PickleBufferEntryList(entries: IndexedSeq[PickleBufferEntry]): + def nameAt(idx: Int): String = val entry = entries(idx) if (entry.isName) entry.readName else if (entry.hasName) entries(entry.nameIndex).readName else "?" 
- } - } - def makeEntryList(buf: PickleBuffer, index: Array[Int]): PickleBufferEntryList = { - val entries = buf.toIndexedSeq.zipWithIndex map { + def makeEntryList(buf: PickleBuffer, index: Array[Int]): PickleBufferEntryList = + val entries = buf.toIndexedSeq.zipWithIndex map: case ((tag, data), num) => PickleBufferEntry(num, index(num), tag, data) - } PickleBufferEntryList(entries) - } - def tag2string(tag: Int): String = tag match { + def tag2string(tag: Int): String = tag match case TERMname => "TERMname" case TYPEname => "TYPEname" case NONEsym => "NONEsym" @@ -95,11 +89,10 @@ object ShowPickled { case MODIFIERS => "MODIFIERS" case _ => "***BAD TAG***(" + tag + ")" - } /** Extremely regrettably, essentially copied from PickleBuffer. */ - def readNat(data: Array[Byte], index: Int): Int = { + def readNat(data: Array[Byte], index: Int): Int = var idx = index var result = 0L var b = 0L @@ -111,9 +104,8 @@ object ShowPickled { }) () result.toInt - } - def printFile(buf: PickleBuffer, out: PrintStream = System.out.nn): Unit = { + def printFile(buf: PickleBuffer, out: PrintStream = System.out.nn): Unit = out.println("Version " + buf.readNat() + "." 
+ buf.readNat()) val index = buf.createIndex val entryList = makeEntryList(buf, index) @@ -121,13 +113,12 @@ object ShowPickled { def p(s: String) = out print s - def printNameRef(): Unit = { + def printNameRef(): Unit = val idx = buf.readNat() val name = entryList nameAt idx val toPrint = " %s(%s)".format(idx, name) out print toPrint - } def printNat() = p(" " + buf.readNat()) def printReadNat(x: Int) = p(" " + x) @@ -139,11 +130,11 @@ object ShowPickled { def printConstAnnotArgRef() = printNat() def printAnnotArgRef() = printNat() - def printSymInfo(end: Int, isType: Boolean): Unit = { + def printSymInfo(end: Int, isType: Boolean): Unit = printNameRef() printSymbolRef() val pflags = buf.readLongNat() - def printFlags(privateWithin: Option[Int]) = { + def printFlags(privateWithin: Option[Int]) = val accessBoundary = ( for (idx <- privateWithin) yield { val s = entryList nameAt idx @@ -152,25 +143,21 @@ object ShowPickled { ) val flagString = PickleBuffer.unpickleScalaFlags(pflags, isType).toString out.print(" %s[%s]".format(toHexString(pflags), flagString)) - } /** Might be info or privateWithin */ val x = buf.readNat() - if (buf.readIndex == end) { + if (buf.readIndex == end) printFlags(None) printReadNat(x) - } - else { + else printFlags(Some(x)) printTypeRef() - } - } /** Note: the entries which require some semantic analysis to be correctly * interpreted are for the most part going to tell you the wrong thing. * It's not so easy to duplicate the logic applied in the UnPickler. 
*/ - def printEntry(i: Int): Unit = { + def printEntry(i: Int): Unit = buf.readIndex = index(i) p(s"$i,${buf.readIndex}: ") val tag = buf.readByte() @@ -178,7 +165,7 @@ object ShowPickled { val len = buf.readNat() val end = len + buf.readIndex p(s" $len:") - tag match { + tag match case TERMname => out.print(" ") out.print(termName(buf.bytes, buf.readIndex, len).toString) @@ -249,17 +236,13 @@ object ShowPickled { printTypeRef(); buf.until(end, () => printSymbolRef()) case _ => - } out.println() if (buf.readIndex != end) out.println("BAD ENTRY END: computed = %d, actual = %d, bytes = %s".format( end, buf.readIndex, buf.bytes.slice(index(i), (end max buf.readIndex)).mkString(", ") )) - } for (i <- 0 until index.length) printEntry(i) - } -} /* * diff --git a/compiler/src/dotty/tools/dotc/util/Signatures.scala b/compiler/src/dotty/tools/dotc/util/Signatures.scala index 5513a1f803c6..339e4c37d6e1 100644 --- a/compiler/src/dotty/tools/dotc/util/Signatures.scala +++ b/compiler/src/dotty/tools/dotc/util/Signatures.scala @@ -17,7 +17,7 @@ import util.Spans.Span import reporting._ -object Signatures { +object Signatures: /** * Represent a method signature. @@ -46,9 +46,8 @@ object Signatures { * @param doc The documentation of this parameter * @param isImplicit Is this parameter implicit? */ - case class Param(name: String, tpe: String, doc: Option[String] = None, isImplicit: Boolean = false) { + case class Param(name: String, tpe: String, doc: Option[String] = None, isImplicit: Boolean = false): def show: String = if name.nonEmpty then s"$name: $tpe" else tpe - } /** * Extract (current parameter index, function index, functions) method call for given position. 
@@ -108,17 +107,16 @@ object Signatures { * next subsequent application exists, it returns the latter */ private def findEnclosingApply(path: List[tpd.Tree], span: Span)(using Context): tpd.Tree = - path.filterNot { + path.filterNot: case apply @ Apply(fun, _) => fun.span.contains(span) || isValid(apply) case unapply @ UnApply(fun, _, _) => fun.span.contains(span) || isValid(unapply) case typeTree @ AppliedTypeTree(fun, _) => fun.span.contains(span) || isValid(typeTree) case typeApply @ TypeApply(fun, _) => fun.span.contains(span) || isValid(typeApply) case _ => true - } match { + match case Nil => tpd.EmptyTree case direct :: enclosing :: _ if isClosingSymbol(direct.source(span.end -1)) => enclosing case direct :: _ => direct - } private def isClosingSymbol(ch: Char) = ch == ')' || ch == ']' @@ -363,11 +361,10 @@ object Signatures { /** * Filter returning only members starting with underscore followed with number */ - private object underscoreMembersFilter extends NameFilter { + private object underscoreMembersFilter extends NameFilter: def apply(pre: Type, name: Name)(using Context): Boolean = name.startsWith("_") && name.toString.drop(1).toIntOption.isDefined def isStable = true - } /** * Creates signature for apply method. 
@@ -376,7 +373,7 @@ object Signatures { * * @return Signature if denot is a function, None otherwise */ - private def toApplySignature(denot: SingleDenotation)(using Context): Option[Signature] = { + private def toApplySignature(denot: SingleDenotation)(using Context): Option[Signature] = val symbol = denot.symbol val docComment = ParsedComment.docOf(symbol) @@ -413,22 +410,20 @@ object Signatures { denot.info.stripPoly match case tpe: (MethodType | AppliedType | TypeRef | TypeParamRef) => val paramss = toParamss(tpe).map(_.filterNot(param => isSyntheticEvidence(param.name))) - val evidenceParams = (tpe.paramNamess.flatten zip tpe.paramInfoss.flatten).flatMap { + val evidenceParams = (tpe.paramNamess.flatten zip tpe.paramInfoss.flatten).flatMap: case (name, AppliedType(tpe, (ref: TypeParamRef) :: _)) if isSyntheticEvidence(name.show) => Some(ref.paramName -> tpe) case _ => None - } val typeParams = denot.info match case poly: PolyType => val tparams = poly.paramNames.zip(poly.paramInfos) tparams.map { (name, info) => - evidenceParams.find((evidenceName: TypeName, _: Type) => name == evidenceName).flatMap { + evidenceParams.find((evidenceName: TypeName, _: Type) => name == evidenceName).flatMap: case (_, tparam) => tparam.show.split('.').lastOption - } match { + match case Some(evidenceTypeName) => s"${name.show}: ${evidenceTypeName}" case None => name.show + info.show - } } case _ => Nil val (name, returnType) = @@ -438,10 +433,9 @@ object Signatures { (denot.name.show, Some(tpe.finalResultType.widenTermRefExpr.show)) Some(Signatures.Signature(name, typeParams, paramss, returnType, docComment.map(_.mainDoc), Some(denot))) case other => None - } @deprecated("Deprecated in favour of `signatureHelp` which now returns Signature along SingleDenotation", "3.1.3") - def toSignature(denot: SingleDenotation)(using Context): Option[Signature] = { + def toSignature(denot: SingleDenotation)(using Context): Option[Signature] = if denot.name.isUnapplyName then val resultType = 
denot.info.stripPoly.finalResultType match case methodType: MethodType => methodType.resultType.widen @@ -455,7 +449,6 @@ object Signatures { toUnapplySignature(denot.asSingleDenotation, paramNames, paramTypes) else toApplySignature(denot) - } /** * Creates signature for unapply method. It is different from apply one as it should not show function name, @@ -515,7 +508,7 @@ object Signatures { * @return A pair composed of the index of the best alternative (0 if no alternatives * were found), and the list of alternatives. */ - private def alternativesFromError(err: ErrorType, params: List[tpd.Tree])(using Context): (Int, List[SingleDenotation]) = { + private def alternativesFromError(err: ErrorType, params: List[tpd.Tree])(using Context): (Int, List[SingleDenotation]) = val alternatives = err.msg match case msg: AmbiguousOverload => msg.alternatives @@ -543,7 +536,5 @@ object Signatures { else alternativesScores.zipWithIndex.maxBy(_._1)._2 (bestAlternative, alternatives) - } -} diff --git a/compiler/src/dotty/tools/dotc/util/SimpleIdentityMap.scala b/compiler/src/dotty/tools/dotc/util/SimpleIdentityMap.scala index 2f202bc05921..f5d954dde72a 100644 --- a/compiler/src/dotty/tools/dotc/util/SimpleIdentityMap.scala +++ b/compiler/src/dotty/tools/dotc/util/SimpleIdentityMap.scala @@ -5,7 +5,7 @@ import collection.mutable.ListBuffer /** A simple linked map with `eq` as the key comparison, optimized for small maps. * It has linear complexity for `apply`, `updated`, and `remove`. 
*/ -abstract class SimpleIdentityMap[K <: AnyRef, +V <: AnyRef] extends (K => V | Null) { +abstract class SimpleIdentityMap[K <: AnyRef, +V <: AnyRef] extends (K => V | Null): final def isEmpty: Boolean = this eq SimpleIdentityMap.myEmpty def size: Int def apply(k: K): V | Null @@ -15,24 +15,21 @@ abstract class SimpleIdentityMap[K <: AnyRef, +V <: AnyRef] extends (K => V | Nu def mapValuesNow[V1 >: V <: AnyRef](f: (K, V1) => V1): SimpleIdentityMap[K, V1] def foreachBinding(f: (K, V) => Unit): Unit def forallBinding(f: (K, V) => Boolean): Boolean - def map2[T](f: (K, V) => T): List[T] = { + def map2[T](f: (K, V) => T): List[T] = val buf = new ListBuffer[T] foreachBinding((k, v) => buf += f(k, v)) buf.toList - } def keys: List[K] = map2((k, v) => k) def toList: List[(K, V)] = map2((k, v) => (k, v)) - override def toString: String = { + override def toString: String = def assocToString(key: K, value: V) = s"$key -> $value" map2(assocToString) mkString ("(", ", ", ")") - } -} -object SimpleIdentityMap { +object SimpleIdentityMap: private val CompactifyThreshold = 4 - private object myEmpty extends SimpleIdentityMap[AnyRef, Nothing] { + private object myEmpty extends SimpleIdentityMap[AnyRef, Nothing]: def size = 0 def apply(k: AnyRef) = null def remove(k: AnyRef) = this @@ -40,11 +37,10 @@ object SimpleIdentityMap { def mapValuesNow[V1 <: AnyRef](f: (AnyRef, V1) => V1) = this def foreachBinding(f: (AnyRef, Nothing) => Unit) = () def forallBinding(f: (AnyRef, Nothing) => Boolean) = true - } def empty[K <: AnyRef]: SimpleIdentityMap[K, Nothing] = myEmpty.asInstanceOf[SimpleIdentityMap[K, Nothing]] - class Map1[K <: AnyRef, +V <: AnyRef] (k1: K, v1: V) extends SimpleIdentityMap[K, V] { + class Map1[K <: AnyRef, +V <: AnyRef] (k1: K, v1: V) extends SimpleIdentityMap[K, V]: def size: Int = 1 def apply(k: K): V | Null = if (k eq k1) v1 @@ -55,15 +51,13 @@ object SimpleIdentityMap { def updated[V1 >: V <: AnyRef](k: K, v: V1): SimpleIdentityMap[K, V1] = if (k eq k1) new 
Map1(k, v) else new Map2(k1, v1, k, v) - def mapValuesNow[V1 >: V <: AnyRef](f: (K, V1) => V1): SimpleIdentityMap[K, V1] = { + def mapValuesNow[V1 >: V <: AnyRef](f: (K, V1) => V1): SimpleIdentityMap[K, V1] = val w1 = f(k1, v1) if (v1 eq w1) this else new Map1(k1, w1) - } def foreachBinding(f: (K, V) => Unit): Unit = f(k1, v1) def forallBinding(f: (K, V) => Boolean): Boolean = f(k1, v1) - } - class Map2[K <: AnyRef, +V <: AnyRef] (k1: K, v1: V, k2: K, v2: V) extends SimpleIdentityMap[K, V] { + class Map2[K <: AnyRef, +V <: AnyRef] (k1: K, v1: V, k2: K, v2: V) extends SimpleIdentityMap[K, V]: def size: Int = 2 def apply(k: K): V | Null = if (k eq k1) v1 @@ -77,16 +71,14 @@ object SimpleIdentityMap { if (k eq k1) new Map2(k, v, k2, v2) else if (k eq k2) new Map2(k1, v1, k, v) else new Map3(k1, v1, k2, v2, k, v) - def mapValuesNow[V1 >: V <: AnyRef](f: (K, V1) => V1): SimpleIdentityMap[K, V1] = { + def mapValuesNow[V1 >: V <: AnyRef](f: (K, V1) => V1): SimpleIdentityMap[K, V1] = val w1 = f(k1, v1); val w2 = f(k2, v2) if ((v1 eq w1) && (v2 eq w2)) this else new Map2(k1, w1, k2, w2) - } def foreachBinding(f: (K, V) => Unit): Unit = { f(k1, v1); f(k2, v2) } def forallBinding(f: (K, V) => Boolean): Boolean = f(k1, v1) && f(k2, v2) - } - class Map3[K <: AnyRef, +V <: AnyRef] (k1: K, v1: V, k2: K, v2: V, k3: K, v3: V) extends SimpleIdentityMap[K, V] { + class Map3[K <: AnyRef, +V <: AnyRef] (k1: K, v1: V, k2: K, v2: V, k3: K, v3: V) extends SimpleIdentityMap[K, V]: def size: Int = 3 def apply(k: K): V | Null = if (k eq k1) v1 @@ -103,16 +95,14 @@ object SimpleIdentityMap { else if (k eq k2) new Map3(k1, v1, k, v, k3, v3) else if (k eq k3) new Map3(k1, v1, k2, v2, k, v) else new Map4(k1, v1, k2, v2, k3, v3, k, v) - def mapValuesNow[V1 >: V <: AnyRef](f: (K, V1) => V1): SimpleIdentityMap[K, V1] = { + def mapValuesNow[V1 >: V <: AnyRef](f: (K, V1) => V1): SimpleIdentityMap[K, V1] = val w1 = f(k1, v1); val w2 = f(k2, v2); val w3 = f(k3, v3) if ((v1 eq w1) && (v2 eq w2) && (v3 
eq w3)) this else new Map3(k1, w1, k2, w2, k3, w3) - } def foreachBinding(f: (K, V) => Unit): Unit = { f(k1, v1); f(k2, v2); f(k3, v3) } def forallBinding(f: (K, V) => Boolean): Boolean = f(k1, v1) && f(k2, v2) && f(k3, v3) - } - class Map4[K <: AnyRef, +V <: AnyRef] (k1: K, v1: V, k2: K, v2: V, k3: K, v3: V, k4: K, v4: V) extends SimpleIdentityMap[K, V] { + class Map4[K <: AnyRef, +V <: AnyRef] (k1: K, v1: V, k2: K, v2: V, k3: K, v3: V, k4: K, v4: V) extends SimpleIdentityMap[K, V]: def size: Int = 4 def apply(k: K): V | Null = if (k eq k1) v1 @@ -132,88 +122,73 @@ object SimpleIdentityMap { else if (k eq k3) new Map4(k1, v1, k2, v2, k, v, k4, v4) else if (k eq k4) new Map4(k1, v1, k2, v2, k3, v3, k, v) else new MapMore(Array[AnyRef](k1, v1, k2, v2, k3, v3, k4, v4, k, v)) - def mapValuesNow[V1 >: V <: AnyRef](f: (K, V1) => V1): SimpleIdentityMap[K, V1] = { + def mapValuesNow[V1 >: V <: AnyRef](f: (K, V1) => V1): SimpleIdentityMap[K, V1] = val w1 = f(k1, v1); val w2 = f(k2, v2); val w3 = f(k3, v3); val w4 = f(k4, v4) if ((v1 eq w1) && (v2 eq w2) && (v3 eq w3) && (v4 eq w4)) this else new Map4(k1, w1, k2, w2, k3, w3, k4, w4) - } def foreachBinding(f: (K, V) => Unit): Unit = { f(k1, v1); f(k2, v2); f(k3, v3); f(k4, v4) } def forallBinding(f: (K, V) => Boolean): Boolean = f(k1, v1) && f(k2, v2) && f(k3, v3) && f(k4, v4) - } - class MapMore[K <: AnyRef, +V <: AnyRef](bindings: Array[AnyRef]) extends SimpleIdentityMap[K, V] { + class MapMore[K <: AnyRef, +V <: AnyRef](bindings: Array[AnyRef]) extends SimpleIdentityMap[K, V]: private def key(i: Int): K = bindings(i).asInstanceOf[K] private def value(i: Int): V = bindings(i + 1).asInstanceOf[V] def size: Int = bindings.length / 2 Stats.record(s"SimpleIdentityMap/$size") - def apply(k: K): V | Null = { + def apply(k: K): V | Null = var i = 0 - while (i < bindings.length) { + while (i < bindings.length) if (bindings(i) eq k) return value(i) i += 2 - } null - } - def remove(k: K): SimpleIdentityMap[K, V] = { + def remove(k: 
K): SimpleIdentityMap[K, V] = var i = 0 - while (i < bindings.length) { + while (i < bindings.length) if (bindings(i) eq k) - return { - if (size == CompactifyThreshold) { + return + if (size == CompactifyThreshold) var m: SimpleIdentityMap[K, V] = empty[K] for (j <- 0 until bindings.length by 2) if (j != i) m = m.updated(key(j), value(j)) m - } - else { + else val bindings1 = new Array[AnyRef](bindings.length - 2) System.arraycopy(bindings, 0, bindings1, 0, i) System.arraycopy(bindings, i + 2, bindings1, i, bindings1.length - i) new MapMore(bindings1) - } - } i += 2 - } this - } - def updated[V1 >: V <: AnyRef](k: K, v: V1): SimpleIdentityMap[K, V] = { + def updated[V1 >: V <: AnyRef](k: K, v: V1): SimpleIdentityMap[K, V] = var i = 0 - while (i < bindings.length) { + while (i < bindings.length) if (bindings(i) eq k) - return { + return if (v eq bindings(i + 1)) this - else { + else val bindings1 = bindings.clone bindings1(i + 1) = v new MapMore(bindings1) - } - } i += 2 - } val bindings2 = new Array[AnyRef](bindings.length + 2) System.arraycopy(bindings, 0, bindings2, 0, bindings.length) bindings2(bindings.length) = k bindings2(bindings.length + 1) = v new MapMore(bindings2) - } - override def contains(k: K): Boolean = { + override def contains(k: K): Boolean = var i = 0 - while (i < bindings.length) { + while (i < bindings.length) if (bindings(i) eq k) return true i += 2 - } false - } - def mapValuesNow[V1 >: V <: AnyRef](f: (K, V1) => V1): SimpleIdentityMap[K, V1] = { + def mapValuesNow[V1 >: V <: AnyRef](f: (K, V1) => V1): SimpleIdentityMap[K, V1] = var bindings1: Array[AnyRef] = bindings var i = 0 - while (i < bindings.length) { + while (i < bindings.length) val v = value(i) val v1 = f(key(i), v) if ((v1 ne v) && (bindings1 eq bindings)) @@ -221,26 +196,18 @@ object SimpleIdentityMap { bindings1(i) = bindings(i) bindings1(i + 1) = v1 i += 2 - } if (bindings1 eq bindings) this else new MapMore(bindings1) - } - def foreachBinding(f: (K, V) => Unit): Unit = { + 
def foreachBinding(f: (K, V) => Unit): Unit = var i = 0 - while (i < bindings.length) { + while (i < bindings.length) f(key(i), value(i)) i += 2 - } - } - def forallBinding(f: (K, V) => Boolean): Boolean = { + def forallBinding(f: (K, V) => Boolean): Boolean = var i = 0 - while (i < bindings.length) { + while (i < bindings.length) if (!f(key(i), value(i))) return false i += 2 - } return true - } - } -} diff --git a/compiler/src/dotty/tools/dotc/util/SimpleIdentitySet.scala b/compiler/src/dotty/tools/dotc/util/SimpleIdentitySet.scala index dd766dc99c7e..7e00bc34ce43 100644 --- a/compiler/src/dotty/tools/dotc/util/SimpleIdentitySet.scala +++ b/compiler/src/dotty/tools/dotc/util/SimpleIdentitySet.scala @@ -7,7 +7,7 @@ import collection.mutable /** A simple linked set with `eq` as the comparison, optimized for small sets. * It has linear complexity for `contains`, `+`, and `-`. */ -abstract class SimpleIdentitySet[+Elem <: AnyRef] { +abstract class SimpleIdentitySet[+Elem <: AnyRef]: def size: Int def + [E >: Elem <: AnyRef](x: E): SimpleIdentitySet[E] def - [E >: Elem <: AnyRef](x: E): SimpleIdentitySet[Elem] @@ -43,9 +43,8 @@ abstract class SimpleIdentitySet[+Elem <: AnyRef] { this.size == that.size && forall(that.contains) override def toString: String = toList.mkString("{", ", ", "}") -} -object SimpleIdentitySet { +object SimpleIdentitySet: def apply[Elem <: AnyRef](elems: Elem*): SimpleIdentitySet[Elem] = elems.foldLeft(empty: SimpleIdentitySet[Elem])(_ + _) @@ -54,7 +53,7 @@ object SimpleIdentitySet { def intersect(ys: SimpleIdentitySet[E]): SimpleIdentitySet[E] = xs.filter(ys.contains) - object empty extends SimpleIdentitySet[Nothing] { + object empty extends SimpleIdentitySet[Nothing]: def size: Int = 0 def + [E <: AnyRef](x: E): SimpleIdentitySet[E] = new Set1[E](x) @@ -67,9 +66,8 @@ object SimpleIdentitySet { def /: [A, E <: AnyRef](z: A)(f: (A, E) => A): A = z def toList = Nil def iterator = Iterator.empty - } - private class Set1[+Elem <: AnyRef](x0: 
AnyRef) extends SimpleIdentitySet[Elem] { + private class Set1[+Elem <: AnyRef](x0: AnyRef) extends SimpleIdentitySet[Elem]: def size = 1 def + [E >: Elem <: AnyRef](x: E): SimpleIdentitySet[E] = if (contains(x)) this else new Set2[E](x0, x) @@ -85,9 +83,8 @@ object SimpleIdentitySet { f(z, x0.asInstanceOf[E]) def toList = x0.asInstanceOf[Elem] :: Nil def iterator = Iterator.single(x0.asInstanceOf[Elem]) - } - private class Set2[+Elem <: AnyRef](x0: AnyRef, x1: AnyRef) extends SimpleIdentitySet[Elem] { + private class Set2[+Elem <: AnyRef](x0: AnyRef, x1: AnyRef) extends SimpleIdentitySet[Elem]: def size = 2 def + [E >: Elem <: AnyRef](x: E): SimpleIdentitySet[E] = if (contains(x)) this else new Set3(x0, x1, x) @@ -104,33 +101,29 @@ object SimpleIdentitySet { def /: [A, E >: Elem <: AnyRef](z: A)(f: (A, E) => A): A = f(f(z, x0.asInstanceOf[E]), x1.asInstanceOf[E]) def toList = x0.asInstanceOf[Elem] :: x1.asInstanceOf[Elem] :: Nil - def iterator = Iterator.tabulate(2) { + def iterator = Iterator.tabulate(2): case 0 => x0.asInstanceOf[Elem] case 1 => x1.asInstanceOf[Elem] - } - } - private class Set3[+Elem <: AnyRef](x0: AnyRef, x1: AnyRef, x2: AnyRef) extends SimpleIdentitySet[Elem] { + private class Set3[+Elem <: AnyRef](x0: AnyRef, x1: AnyRef, x2: AnyRef) extends SimpleIdentitySet[Elem]: def size = 3 def + [E >: Elem <: AnyRef](x: E): SimpleIdentitySet[E] = if (contains(x)) this - else { + else val xs = new Array[AnyRef](4) xs(0) = x0 xs(1) = x1 xs(2) = x2 xs(3) = x new SetN[E](xs) - } def - [E >: Elem <: AnyRef](x: E): SimpleIdentitySet[Elem] = if (x `eq` x0) new Set2(x1, x2) else if (x `eq` x1) new Set2(x0, x2) else if (x `eq` x2) new Set2(x0, x1) else this def contains[E >: Elem <: AnyRef](x: E): Boolean = (x `eq` x0) || (x `eq` x1) || (x `eq` x2) - def foreach(f: Elem => Unit): Unit = { + def foreach(f: Elem => Unit): Unit = f(x0.asInstanceOf[Elem]); f(x1.asInstanceOf[Elem]); f(x2.asInstanceOf[Elem]) - } def exists[E >: Elem <: AnyRef](p: E => Boolean): 
Boolean = p(x0.asInstanceOf[E]) || p(x1.asInstanceOf[E]) || p(x2.asInstanceOf[E]) def map[B <: AnyRef](f: Elem => B): SimpleIdentitySet[B] = @@ -138,24 +131,21 @@ object SimpleIdentitySet { def /: [A, E >: Elem <: AnyRef](z: A)(f: (A, E) => A): A = f(f(f(z, x0.asInstanceOf[E]), x1.asInstanceOf[E]), x2.asInstanceOf[E]) def toList = x0.asInstanceOf[Elem] :: x1.asInstanceOf[Elem] :: x2.asInstanceOf[Elem] :: Nil - def iterator = Iterator.tabulate(3) { + def iterator = Iterator.tabulate(3): case 0 => x0.asInstanceOf[Elem] case 1 => x1.asInstanceOf[Elem] case 2 => x2.asInstanceOf[Elem] - } - } - private class SetN[+Elem <: AnyRef](val xs: Array[AnyRef]) extends SimpleIdentitySet[Elem] { + private class SetN[+Elem <: AnyRef](val xs: Array[AnyRef]) extends SimpleIdentitySet[Elem]: def size = xs.length def + [E >: Elem <: AnyRef](x: E): SimpleIdentitySet[E] = if (contains(x)) this - else { + else val xs1 = new Array[AnyRef](size + 1) System.arraycopy(xs, 0, xs1, 0, size) xs1(size) = x new SetN[E](xs1) - } - def - [E >: Elem <: AnyRef](x: E): SimpleIdentitySet[Elem] = { + def - [E >: Elem <: AnyRef](x: E): SimpleIdentitySet[Elem] = var i = 0 while (i < size && (xs(i) `ne` x)) i += 1 if (i == size) this @@ -164,64 +154,54 @@ object SimpleIdentitySet { else if (i == 1) new Set3(xs(0), xs(2), xs(3)) else if (i == 2) new Set3(xs(0), xs(1), xs(3)) else new Set3(xs(0), xs(1), xs(2)) - else { + else val xs1 = new Array[AnyRef](size - 1) System.arraycopy(xs, 0, xs1, 0, i) System.arraycopy(xs, i + 1, xs1, i, size - (i + 1)) new SetN(xs1) - } - } - def contains[E >: Elem <: AnyRef](x: E): Boolean = { + def contains[E >: Elem <: AnyRef](x: E): Boolean = var i = 0 while (i < size && (xs(i) `ne` x)) i += 1 i < size - } - def foreach(f: Elem => Unit): Unit = { + def foreach(f: Elem => Unit): Unit = var i = 0 while (i < size) { f(xs(i).asInstanceOf[Elem]); i += 1 } - } def exists[E >: Elem <: AnyRef](p: E => Boolean): Boolean = xs.asInstanceOf[Array[E]].exists(p) def map[B <: AnyRef](f: 
Elem => B): SimpleIdentitySet[B] = SetN(xs.map(x => f(x.asInstanceOf[Elem]).asInstanceOf[AnyRef])) def /: [A, E >: Elem <: AnyRef](z: A)(f: (A, E) => A): A = xs.asInstanceOf[Array[E]].foldLeft(z)(f) - def toList: List[Elem] = { + def toList: List[Elem] = val buf = new mutable.ListBuffer[Elem] foreach(buf += _) buf.toList - } def iterator = xs.iterator.asInstanceOf[Iterator[Elem]] override def ++ [E >: Elem <: AnyRef](that: SimpleIdentitySet[E]): SimpleIdentitySet[E] = - that match { + that match case that: SetN[?] => var toAdd: mutable.ArrayBuffer[AnyRef] = null var i = 0 val limit = that.xs.length - while (i < limit) { + while (i < limit) val elem = that.xs(i) - if (!contains(elem)) { + if (!contains(elem)) if (toAdd == null) toAdd = new mutable.ArrayBuffer toAdd += elem - } i += 1 - } if (toAdd == null) this - else { + else val numAdded = toAdd.size val xs1 = new Array[AnyRef](size + numAdded) System.arraycopy(xs, 0, xs1, 0, size) var i = 0 - while (i < numAdded) { + while (i < numAdded) xs1(i + size) = toAdd(i) i += 1 - } new SetN[E](xs1) - } case _ => super.++(that) - } override def -- [E >: Elem <: AnyRef](that: SimpleIdentitySet[E]): SimpleIdentitySet[E] = - that match { + that match case that: SetN[?] 
=> // both sets are large, optimize assuming they are similar // by starting from empty set and adding elements @@ -231,31 +211,24 @@ object SimpleIdentitySet { val thatElems = that.xs var i = 0 var searchStart = 0 - while (i < thisSize) { + while (i < thisSize) val elem = this.xs(i) var j = searchStart // search thatElems in round robin fashion, starting one after latest hit var missing = false - while (!missing && (elem ne thatElems(j))) { + while (!missing && (elem ne thatElems(j))) j += 1 if (j == thatSize) j = 0 missing = j == searchStart - } - if (missing) { + if (missing) if (toAdd == null) toAdd = new mutable.ArrayBuffer toAdd += elem - } else searchStart = (j + 1) % thatSize i += 1 - } if (toAdd == null) empty - else toAdd.size match { + else toAdd.size match case 1 => new Set1[E](toAdd(0)) case 2 => new Set2[E](toAdd(0), toAdd(1)) case 3 => new Set3[E](toAdd(0), toAdd(1), toAdd(2)) case _ => new SetN[E](toAdd.toArray) - } case _ => // this set is large, that set is small: reduce from above using `-` ((this: SimpleIdentitySet[E]) /: that)(_ - _) - } - } -} diff --git a/compiler/src/dotty/tools/dotc/util/SixteenNibbles.scala b/compiler/src/dotty/tools/dotc/util/SixteenNibbles.scala index 42286aef5d31..3c6146041c33 100644 --- a/compiler/src/dotty/tools/dotc/util/SixteenNibbles.scala +++ b/compiler/src/dotty/tools/dotc/util/SixteenNibbles.scala @@ -4,7 +4,7 @@ package dotty.tools.dotc.util * values 0..15 in a single Long. 
* */ -class SixteenNibbles(val bits: Long) extends AnyVal { +class SixteenNibbles(val bits: Long) extends AnyVal: import SixteenNibbles._ def apply(idx: Int): Int = @@ -19,10 +19,8 @@ class SixteenNibbles(val bits: Long) extends AnyVal { override def toString: String = s"SixteenNibbles(${elements.mkString(", ")})" -} -object SixteenNibbles { +object SixteenNibbles: inline val Width = 4 inline val Mask = (1 << Width) - 1 final val LongMask: Long = Mask.toLong -} diff --git a/compiler/src/dotty/tools/dotc/util/SourceFile.scala b/compiler/src/dotty/tools/dotc/util/SourceFile.scala index 42d07869f74e..63bee7b5aaa9 100644 --- a/compiler/src/dotty/tools/dotc/util/SourceFile.scala +++ b/compiler/src/dotty/tools/dotc/util/SourceFile.scala @@ -22,7 +22,7 @@ import java.util.Optional import java.util.concurrent.atomic.AtomicInteger import java.util.regex.Pattern -object ScriptSourceFile { +object ScriptSourceFile: @sharable private val headerPattern = Pattern.compile("""^(::)?!#.*(\r|\n|\r\n)""", Pattern.MULTILINE) private val headerStarts = List("#!", "::#!") @@ -30,46 +30,40 @@ object ScriptSourceFile { def hasScriptHeader(content: Array[Char]): Boolean = headerStarts.exists(content.startsWith(_)) - def apply(file: AbstractFile, content: Array[Char]): SourceFile = { + def apply(file: AbstractFile, content: Array[Char]): SourceFile = /** Length of the script header from the given content, if there is one. * The header begins with "#!" or "::#!" and is either a single line, * or it ends with a line starting with "!#" or "::!#", if present. 
*/ val headerLength = - if (headerStarts exists (content startsWith _)) { + if (headerStarts exists (content startsWith _)) val matcher = headerPattern matcher content.mkString if (matcher.find) matcher.end else content.indexOf('\n') // end of first line - } else 0 // overwrite hash-bang lines with all spaces to preserve line numbers val hashBangLines = content.take(headerLength).mkString.split("\\r?\\n") if hashBangLines.nonEmpty then for i <- 0 until headerLength do - content(i) match { + content(i) match case '\r' | '\n' => case _ => content(i) = ' ' - } - new SourceFile(file, content) { + new SourceFile(file, content): override val underlying = new SourceFile(this.file, this.content) - } - } -} -class SourceFile(val file: AbstractFile, computeContent: => Array[Char]) extends interfaces.SourceFile { +class SourceFile(val file: AbstractFile, computeContent: => Array[Char]) extends interfaces.SourceFile: import SourceFile._ private var myContent: Array[Char] | Null = null /** The contents of the original source file. Note that this can be empty, for example when * the source is read from Tasty. 
*/ - def content(): Array[Char] = { + def content(): Array[Char] = if (myContent == null) myContent = computeContent myContent - } private var _maybeInComplete: Boolean = false @@ -80,12 +74,10 @@ class SourceFile(val file: AbstractFile, computeContent: => Array[Char]) extends override def jfile: Optional[JFile] = Optional.ofNullable(file.file) override def equals(that: Any): Boolean = - (this `eq` that.asInstanceOf[AnyRef]) || { - that match { + (this `eq` that.asInstanceOf[AnyRef]) `||`: + that match case that : SourceFile => file == that.file && start == that.start case _ => false - } - } override def hashCode: Int = file.hashCode * 41 + start.hashCode @@ -119,7 +111,7 @@ class SourceFile(val file: AbstractFile, computeContent: => Array[Char]) extends def positionInUltimateSource(position: SourcePosition): SourcePosition = SourcePosition(underlying, position.span shift start) - private def calculateLineIndicesFromContents() = { + private def calculateLineIndicesFromContents() = val cs = content() val buf = new ArrayBuffer[Int] buf += 0 @@ -134,7 +126,6 @@ class SourceFile(val file: AbstractFile, computeContent: => Array[Char]) extends i += 1 buf += cs.length // sentinel, so that findLine below works smoother buf.toArray - } private var lineIndicesCache: Array[Int] = _ private def lineIndices: Array[Int] = @@ -171,17 +162,15 @@ class SourceFile(val file: AbstractFile, computeContent: => Array[Char]) extends /** Convert offset to line in this source file * Lines are numbered from 0 */ - def offsetToLine(offset: Int): Int = { + def offsetToLine(offset: Int): Int = lastLine = Util.bestFit(lineIndices, lineIndices.length, offset, lastLine) if (offset >= length) lastLine -= 1 // compensate for the sentinel lastLine - } /** The index of the first character of the line containing position `offset` */ - def startOfLine(offset: Int): Int = { + def startOfLine(offset: Int): Int = require(offset >= 0) lineToOffset(offsetToLine(offset)) - } /** The start index of the line 
following the one containing position `offset` */ def nextLine(offset: Int): Int = @@ -192,25 +181,21 @@ class SourceFile(val file: AbstractFile, computeContent: => Array[Char]) extends content.slice(startOfLine(offset), nextLine(offset)).mkString /** The column corresponding to `offset`, starting at 0 */ - def column(offset: Int): Int = { + def column(offset: Int): Int = var idx = startOfLine(offset) offset - idx - } /** The padding of the column corresponding to `offset`, includes tabs */ - def startColumnPadding(offset: Int): String = { + def startColumnPadding(offset: Int): String = var idx = startOfLine(offset) val pad = new StringBuilder - while (idx != offset) { + while (idx != offset) pad.append(if (idx < content().length && content()(idx) == '\t') '\t' else ' ') idx += 1 - } pad.result() - } override def toString: String = file.toString -} -object SourceFile { +object SourceFile: implicit def eqSource: CanEqual[SourceFile, SourceFile] = CanEqual.derived implicit def fromContext(using Context): SourceFile = ctx.source @@ -226,7 +211,7 @@ object SourceFile { * * It returns the absolute path of `source` if it is not contained in `reference`. */ - def relativePath(source: SourceFile, reference: String): String = { + def relativePath(source: SourceFile, reference: String): String = val file = source.file val jpath = file.jpath if jpath eq null then @@ -256,7 +241,6 @@ object SourceFile { path.iterator.asScala.mkString("/") else sourcePath.toString - } /** Return true if file is a script: * if filename extension is not .scala and has a script header. 
@@ -279,9 +263,7 @@ object SourceFile { SourceFile(file, chars) def apply(file: AbstractFile | Null, computeContent: => Array[Char]): SourceFile = new SourceFile(file, computeContent) -} -@sharable object NoSource extends SourceFile(NoAbstractFile, Array[Char]()) { +@sharable object NoSource extends SourceFile(NoAbstractFile, Array[Char]()): override def exists: Boolean = false override def atSpan(span: Span): SourcePosition = NoSourcePosition -} diff --git a/compiler/src/dotty/tools/dotc/util/SourcePosition.scala b/compiler/src/dotty/tools/dotc/util/SourcePosition.scala index 29f9a34d2292..f381b17a1759 100644 --- a/compiler/src/dotty/tools/dotc/util/SourcePosition.scala +++ b/compiler/src/dotty/tools/dotc/util/SourcePosition.scala @@ -12,7 +12,7 @@ import scala.annotation.internal.sharable /** A source position is comprised of a span and a source file */ case class SourcePosition(source: SourceFile, span: Span, outer: SourcePosition = NoSourcePosition) -extends SrcPos, interfaces.SourcePosition, Showable { +extends SrcPos, interfaces.SourcePosition, Showable: def sourcePos(using Context) = this @@ -34,12 +34,11 @@ extends SrcPos, interfaces.SourcePosition, Showable { source.content.slice(source.startOfLine(start), source.nextLine(end)) /** The lines of the position */ - def lines: Range = { + def lines: Range = val startOffset = source.offsetToLine(start) val endOffset = source.offsetToLine(end - 1) // -1 to drop a line if no chars in it form part of the position if (startOffset >= endOffset) line to line else startOffset to endOffset - } def lineOffsets: List[Int] = lines.toList.map(source.lineToOffset(_)) @@ -72,27 +71,24 @@ extends SrcPos, interfaces.SourcePosition, Showable { /** Inner most position that is contained within the `outermost` position. * Most precise position that comes from the call site. 
*/ - def nonInlined: SourcePosition = { + def nonInlined: SourcePosition = val om = outermost def rec(self: SourcePosition): SourcePosition = if om.contains(self) then self else rec(self.outer) rec(this) - } override def toString: String = s"${if (source.exists) source.file.toString else "(no source)"}:$span" def toText(printer: Printer): Text = printer.toText(this) -} /** A sentinel for a non-existing source position */ -@sharable object NoSourcePosition extends SourcePosition(NoSource, NoSpan, null) { +@sharable object NoSourcePosition extends SourcePosition(NoSource, NoSpan, null): override def line: Int = -1 override def column: Int = -1 override def toString: String = "?" override def withOuter(outer: SourcePosition): SourcePosition = outer -} /** Things that can produce a source position and a span */ trait SrcPos: diff --git a/compiler/src/dotty/tools/dotc/util/Spans.scala b/compiler/src/dotty/tools/dotc/util/Spans.scala index e1487408f36b..49799d11c3d6 100644 --- a/compiler/src/dotty/tools/dotc/util/Spans.scala +++ b/compiler/src/dotty/tools/dotc/util/Spans.scala @@ -14,7 +14,7 @@ import language.implicitConversions * Point: unsigned 12 Bits relative to start * NoSpan encoded as -1L (this is a normally invalid span because point would lie beyond end). */ -object Spans { +object Spans: private inline val StartEndBits = 26 private inline val StartEndMask = (1L << StartEndBits) - 1 @@ -35,29 +35,26 @@ object Spans { * is roughly where the `^` would go if an error was diagnosed at that position. * All quantities are encoded opaquely in a Long. */ - class Span(val coords: Long) extends AnyVal { + class Span(val coords: Long) extends AnyVal: /** Is this span different from NoSpan? */ def exists: Boolean = this != NoSpan /** The start of this span. 
*/ - def start: Int = { + def start: Int = assert(exists) (coords & StartEndMask).toInt - } /** The end of this span */ - def end: Int = { + def end: Int = assert(exists) ((coords >>> StartEndBits) & StartEndMask).toInt - } /** The point of this span, returns start for synthetic spans */ - def point: Int = { + def point: Int = assert(exists) val poff = pointDelta if (poff == SyntheticPointDelta) start else start + poff - } /** The difference between point and start in this span */ def pointDelta: Int = @@ -79,7 +76,7 @@ object Spans { !that.exists || exists && (start <= that.start && end >= that.end) /** Does the range of this span overlap with the range of that span at more than a single point? */ - def overlaps(that: Span): Boolean = { + def overlaps(that: Span): Boolean = def containsInner(span: Span, offset: Int) = span.start < offset && offset < span.end exists && that.exists && ( containsInner(this, that.start) @@ -87,7 +84,6 @@ object Spans { || containsInner(that, this.start) || containsInner(that, this.end) ) - } /** Is this span synthetic? 
*/ def isSynthetic: Boolean = pointDelta == SyntheticPointDelta @@ -132,17 +128,15 @@ object Spans { /** A synthetic copy of this span */ def toSynthetic: Span = if (isSynthetic) this else Span(start, end) - override def toString: String = { + override def toString: String = val (left, right) = if (isSynthetic) ("<", ">") else ("[", "]") if (exists) s"$left$start..${if (point == start) "" else s"$point.."}$end$right" else s"${left}no position${right}" - } def ==(that: Span): Boolean = this.coords == that.coords def !=(that: Span): Boolean = this.coords != that.coords - } private def fromOffsets(start: Int, end: Int, pointDelta: Int) = //assert(start <= end || start == 1 && end == 0, s"$start..$end") @@ -156,10 +150,9 @@ object Spans { fromOffsets(start, end, SyntheticPointDelta) /** A source-derived span with given start, end, and point delta */ - def Span(start: Int, end: Int, point: Int): Span = { + def Span(start: Int, end: Int, point: Int): Span = val pointDelta = (point - start) max 0 fromOffsets(start, end, if (pointDelta >= SyntheticPointDelta) 0 else pointDelta) - } /** A synthetic zero-extent span that starts and ends at given `start`. */ def Span(start: Int): Span = Span(start, start) @@ -170,19 +163,16 @@ object Spans { /** The coordinate of a symbol. This is either an index or * a zero-range span. 
*/ - class Coord(val encoding: Int) extends AnyVal { + class Coord(val encoding: Int) extends AnyVal: def isIndex: Boolean = encoding > 0 def isSpan: Boolean = encoding <= 0 - def toIndex: Int = { + def toIndex: Int = assert(isIndex) encoding - 1 - } - def toSpan: Span = { + def toSpan: Span = assert(isSpan) if (this == NoCoord) NoSpan else Span(-1 - encoding) - } override def toString = if isSpan then s"$toSpan" else s"Coord(idx=$toIndex)" - } /** An index coordinate */ implicit def indexCoord(n: Int): Coord = new Coord(n + 1) @@ -192,4 +182,3 @@ object Spans { /** A sentinel for a missing coordinate */ val NoCoord: Coord = new Coord(0) -} diff --git a/compiler/src/dotty/tools/dotc/util/Stats.scala b/compiler/src/dotty/tools/dotc/util/Stats.scala index e9b72015b202..876d4b6ac3c0 100644 --- a/compiler/src/dotty/tools/dotc/util/Stats.scala +++ b/compiler/src/dotty/tools/dotc/util/Stats.scala @@ -7,7 +7,7 @@ import scala.annotation.internal.sharable import core.Contexts._ import collection.mutable -@sharable object Stats { +@sharable object Stats: inline val enabled = false @@ -15,18 +15,16 @@ import collection.mutable @volatile private var stack: List[String] = Nil - val hits: mutable.HashMap[String, Int] = new mutable.HashMap[String, Int] { + val hits: mutable.HashMap[String, Int] = new mutable.HashMap[String, Int]: override def default(key: String): Int = 0 - } inline def record(inline fn: String, inline n: Int = 1): Unit = if (enabled) doRecord(fn, n) def doRecord(fn: String, n: Int) = - if (monitored) { + if (monitored) val name = if (fn.startsWith("member-")) "member" else fn hits(name) += n - } def doRecordSize(fn: String, coll: scala.collection.Iterable[_]): coll.type = doRecord(fn, coll.size) @@ -35,24 +33,21 @@ import collection.mutable inline def trackTime[T](fn: String)(inline op: T): T = if (enabled) doTrackTime(fn)(op) else op - def doTrackTime[T](fn: String)(op: => T): T = { - if (monitored) { + def doTrackTime[T](fn: String)(op: => T): T = + if 
(monitored) val start = System.nanoTime try op finally record(fn, ((System.nanoTime - start) / 1000).toInt) - } else op - } inline val GroupChar = '/' /** Aggregate all counts of all keys with a common prefix, followed by `:` */ - private def aggregate(): Unit = { + private def aggregate(): Unit = val groups = hits.keys .filter(_.contains(GroupChar)) .groupBy(_.takeWhile(_ != GroupChar)) for ((prefix, names) <- groups; name <- names) hits(s"Total $prefix") += hits(name) - } def maybeMonitored[T](op: => T)(using Context): T = if ctx.settings.YdetailedStats.value then @@ -65,4 +60,3 @@ import collection.mutable println(hits.toList.sortBy(_._2).map{ case (x, y) => s"$x -> $y" } mkString "\n") hits.clear() else op -} diff --git a/compiler/src/dotty/tools/dotc/util/Store.scala b/compiler/src/dotty/tools/dotc/util/Store.scala index d8c9230b9272..b80a7aa6fb74 100644 --- a/compiler/src/dotty/tools/dotc/util/Store.scala +++ b/compiler/src/dotty/tools/dotc/util/Store.scala @@ -1,33 +1,28 @@ package dotty.tools.dotc.util -object Store { +object Store: class Location[T](private[Store] val idx: Int) extends AnyVal val empty: Store = new Store(Array()) -} -class Store(private val elems: Array[AnyRef | Null]) extends AnyVal { +class Store(private val elems: Array[AnyRef | Null]) extends AnyVal: import Store._ - def newLocation[T](): (Location[T], Store) = { + def newLocation[T](): (Location[T], Store) = val elems1 = new Array[AnyRef | Null](elems.length + 1) System.arraycopy(elems, 0, elems1, 0, elems.length) (new Location(elems.length), new Store(elems1)) - } - def newLocation[T](initial: T): (Location[T], Store) = { + def newLocation[T](initial: T): (Location[T], Store) = val (loc, store) = newLocation[T]() store.elems(loc.idx) = initial.asInstanceOf[AnyRef | Null] (loc, store) - } - def updated[T](loc: Location[T], value: T): Store = { + def updated[T](loc: Location[T], value: T): Store = val elems1 = elems.clone elems1(loc.idx) = value.asInstanceOf[AnyRef | Null] new 
Store(elems1) - } def apply[T](loc: Location[T]): T = elems(loc.idx).asInstanceOf[T] -} diff --git a/compiler/src/dotty/tools/dotc/util/Util.scala b/compiler/src/dotty/tools/dotc/util/Util.scala index 7b6e3b1136ff..b451e0a698e1 100644 --- a/compiler/src/dotty/tools/dotc/util/Util.scala +++ b/compiler/src/dotty/tools/dotc/util/Util.scala @@ -1,6 +1,6 @@ package dotty.tools.dotc.util -object Util { +object Util: /** The index `i` in `candidates.indices` such that `candidates(i) <= x` and * `candidates(i)` is closest to `x`, determined by binary search, or -1 @@ -10,7 +10,7 @@ object Util { * `candidates.length/2`. * @pre candidates is sorted */ - def bestFit(candidates: Array[Int], length: Int, x: Int, hint: Int = -1): Int = { + def bestFit(candidates: Array[Int], length: Int, x: Int, hint: Int = -1): Int = def recur(lo: Int, hi: Int, mid: Int): Int = if (x < candidates(mid)) recur(lo, mid - 1, (lo + mid - 1) / 2) @@ -20,6 +20,4 @@ object Util { val initMid = if (0 <= hint && hint < length) hint else length / 2 if (length == 0 || x < candidates(0)) -1 else recur(0, length, initMid) - } -} diff --git a/compiler/src/dotty/tools/dotc/util/WeakHashSet.scala b/compiler/src/dotty/tools/dotc/util/WeakHashSet.scala index 975826a87a37..354924a998e8 100644 --- a/compiler/src/dotty/tools/dotc/util/WeakHashSet.scala +++ b/compiler/src/dotty/tools/dotc/util/WeakHashSet.scala @@ -18,7 +18,7 @@ import dotty.tools._ * This set implementation is not in general thread safe without external concurrency control. However it behaves * properly when GC concurrently collects elements in this set. 
*/ -abstract class WeakHashSet[A <: AnyRef](initialCapacity: Int = 8, loadFactor: Double = 0.5) extends MutableSet[A] { +abstract class WeakHashSet[A <: AnyRef](initialCapacity: Int = 8, loadFactor: Double = 0.5) extends MutableSet[A]: import WeakHashSet._ @@ -40,13 +40,12 @@ abstract class WeakHashSet[A <: AnyRef](initialCapacity: Int = 8, loadFactor: Do * from a specified initial capacity compute the capacity we'll use as being the next * power of two equal to or greater than the specified initial capacity */ - private def computeCapacity = { + private def computeCapacity = if (initialCapacity < 0) throw new IllegalArgumentException("initial capacity cannot be less than 0") var candidate = 1 while (candidate < initialCapacity) candidate *= 2 candidate - } /** * the underlying table of entries which is an array of Entry linked lists @@ -69,25 +68,23 @@ abstract class WeakHashSet[A <: AnyRef](initialCapacity: Int = 8, loadFactor: Do /** * remove a single entry from a linked list in a given bucket */ - private def remove(bucket: Int, prevEntry: Entry[A] | Null, entry: Entry[A]): Unit = { + private def remove(bucket: Int, prevEntry: Entry[A] | Null, entry: Entry[A]): Unit = Stats.record(statsItem("remove")) - prevEntry match { + prevEntry match case null => table(bucket) = entry.tail case _ => prevEntry.tail = entry.tail - } count -= 1 - } /** * remove entries associated with elements that have been gc'ed */ - protected def removeStaleEntries(): Unit = { + protected def removeStaleEntries(): Unit = def poll(): Entry[A] | Null = queue.poll().asInstanceOf @tailrec - def queueLoop(): Unit = { + def queueLoop(): Unit = val stale = poll() - if (stale != null) { + if (stale != null) val bucket = index(stale.hash) @tailrec @@ -99,25 +96,22 @@ abstract class WeakHashSet[A <: AnyRef](initialCapacity: Int = 8, loadFactor: Do linkedListLoop(null, table(bucket)) queueLoop() - } - } queueLoop() - } /** * Double the size of the internal table */ - protected def resize(): Unit = { 
+ protected def resize(): Unit = Stats.record(statsItem("resize")) val oldTable = table table = new Array[Entry[A] | Null](oldTable.size * 2) threshold = computeThreshold @tailrec - def tableLoop(oldBucket: Int): Unit = if (oldBucket < oldTable.size) { + def tableLoop(oldBucket: Int): Unit = if (oldBucket < oldTable.size) @tailrec - def linkedListLoop(entry: Entry[A] | Null): Unit = entry match { + def linkedListLoop(entry: Entry[A] | Null): Unit = entry match case null => () case _ => val bucket = index(entry.hash) @@ -125,17 +119,14 @@ abstract class WeakHashSet[A <: AnyRef](initialCapacity: Int = 8, loadFactor: Do entry.tail = table(bucket) table(bucket) = entry linkedListLoop(oldNext) - } linkedListLoop(oldTable(oldBucket)) tableLoop(oldBucket + 1) - } tableLoop(0) - } // TODO: remove the `case null` when we can enable explicit nulls in regular compiling, // since the type `A <: AnyRef` of `elem` can ensure the value is not null. - def lookup(elem: A): A | Null = (elem: A | Null) match { + def lookup(elem: A): A | Null = (elem: A | Null) match case null => throw new NullPointerException("WeakHashSet cannot hold nulls") case _ => Stats.record(statsItem("lookup")) @@ -143,28 +134,25 @@ abstract class WeakHashSet[A <: AnyRef](initialCapacity: Int = 8, loadFactor: Do val bucket = index(hash(elem)) @tailrec - def linkedListLoop(entry: Entry[A] | Null): A | Null = entry match { + def linkedListLoop(entry: Entry[A] | Null): A | Null = entry match case null => null case _ => val entryElem = entry.get if entryElem != null && isEqual(elem, entryElem) then entryElem else linkedListLoop(entry.tail) - } linkedListLoop(table(bucket)) - } - protected def addEntryAt(bucket: Int, elem: A, elemHash: Int, oldHead: Entry[A] | Null): A = { + protected def addEntryAt(bucket: Int, elem: A, elemHash: Int, oldHead: Entry[A] | Null): A = Stats.record(statsItem("addEntryAt")) table(bucket) = new Entry(elem, elemHash, oldHead, queue) count += 1 if (count > threshold) resize() elem - } // 
TODO: remove the `case null` when we can enable explicit nulls in regular compiling, // since the type `A <: AnyRef` of `elem` can ensure the value is not null. - def put(elem: A): A = (elem: A | Null) match { + def put(elem: A): A = (elem: A | Null) match case null => throw new NullPointerException("WeakHashSet cannot hold nulls") case _ => Stats.record(statsItem("put")) @@ -174,20 +162,18 @@ abstract class WeakHashSet[A <: AnyRef](initialCapacity: Int = 8, loadFactor: Do val oldHead = table(bucket) @tailrec - def linkedListLoop(entry: Entry[A] | Null): A = entry match { + def linkedListLoop(entry: Entry[A] | Null): A = entry match case null => addEntryAt(bucket, elem, h, oldHead) case _ => val entryElem = entry.get if entryElem != null && isEqual(elem, entryElem) then entryElem.uncheckedNN else linkedListLoop(entry.tail) - } linkedListLoop(oldHead) - } def +=(elem: A): Unit = put(elem) - def -=(elem: A): Unit = (elem: A | Null) match { + def -=(elem: A): Unit = (elem: A | Null) match case null => case _ => Stats.record(statsItem("-=")) @@ -202,9 +188,8 @@ abstract class WeakHashSet[A <: AnyRef](initialCapacity: Int = 8, loadFactor: Do else linkedListLoop(entry, entry.tail) linkedListLoop(null, table(bucket)) - } - def clear(resetToInitial: Boolean): Unit = { + def clear(resetToInitial: Boolean): Unit = table = new Array[Entry[A] | Null](table.size) threshold = computeThreshold count = 0 @@ -212,18 +197,16 @@ abstract class WeakHashSet[A <: AnyRef](initialCapacity: Int = 8, loadFactor: Do // drain the queue - doesn't do anything because we're throwing away all the values anyway @tailrec def queueLoop(): Unit = if (queue.poll() != null) queueLoop() queueLoop() - } - def size: Int = { + def size: Int = removeStaleEntries() count - } // Iterator over all the elements in this set in no particular order - override def iterator: Iterator[A] = { + override def iterator: Iterator[A] = removeStaleEntries() - new collection.AbstractIterator[A] { + new 
collection.AbstractIterator[A]: /** * the bucket currently being examined. Initially it's set past the last bucket and will be decremented @@ -241,23 +224,20 @@ abstract class WeakHashSet[A <: AnyRef](initialCapacity: Int = 8, loadFactor: Do private var lookaheadelement: A | Null = null @tailrec - def hasNext: Boolean = { - while (entry == null && currentBucket > 0) { + def hasNext: Boolean = + while (entry == null && currentBucket > 0) currentBucket -= 1 entry = table(currentBucket) - } val e = entry if (e == null) false - else { + else lookaheadelement = e.get if lookaheadelement == null then // element null means the weakref has been cleared since we last did a removeStaleEntries(), move to the next entry entry = e.tail hasNext else true - } - } def next(): A = if lookaheadelement == null then @@ -267,32 +247,29 @@ abstract class WeakHashSet[A <: AnyRef](initialCapacity: Int = 8, loadFactor: Do lookaheadelement = null entry = entry.nn.tail result - } - } - protected def statsItem(op: String): String = { + protected def statsItem(op: String): String = val prefix = "WeakHashSet." val suffix = getClass.getSimpleName s"$prefix$op $suffix" - } /** * Diagnostic information about the internals of this set. Not normally * needed by ordinary code, but may be useful for diagnosing performance problems */ - private[util] class Diagnostics { + private[util] class Diagnostics: /** * Verify that the internal structure of this hash set is fully consistent. * Throws an assertion error on any problem. In order for it to be reliable * the entries must be stable. If any are garbage collected during validation * then an assertion may inappropriately fire. 
*/ - def fullyValidate(): Unit = { + def fullyValidate(): Unit = var computedCount = 0 var bucket = 0 - while (bucket < table.size) { + while (bucket < table.size) var entry = table(bucket) - while (entry != null) { + while (entry != null) assert(entry.get != null, s"$entry had a null value indicated that gc activity was happening during diagnostic validation or that a null value was inserted") computedCount += 1 val cachedHash = entry.hash @@ -302,13 +279,10 @@ abstract class WeakHashSet[A <: AnyRef](initialCapacity: Int = 8, loadFactor: Do assert(computedBucket == bucket, s"for $entry the computed bucket was $computedBucket but should have been $bucket") entry = entry.tail - } bucket += 1 - } assert(computedCount == count, s"The computed count was $computedCount but should have been $count") - } /** * Produces a diagnostic dump of the table that underlies this hash set. @@ -331,19 +305,16 @@ abstract class WeakHashSet[A <: AnyRef](initialCapacity: Int = 8, loadFactor: Do * Number of buckets in the table */ def bucketsCount: Int = table.size - } private[util] def diagnostics: Diagnostics = new Diagnostics -} /** * Companion object for WeakHashSet */ -object WeakHashSet { +object WeakHashSet: /** * A single entry in a WeakHashSet. 
It's a WeakReference plus a cached hash code and * a link to the next Entry in the same bucket */ class Entry[A](@constructorOnly element: A, val hash:Int, var tail: Entry[A] | Null, @constructorOnly queue: ReferenceQueue[A]) extends WeakReference[A](element, queue) -} diff --git a/compiler/src/dotty/tools/dotc/util/common.scala b/compiler/src/dotty/tools/dotc/util/common.scala index 85ce9a29f2df..cbc2e65232b3 100644 --- a/compiler/src/dotty/tools/dotc/util/common.scala +++ b/compiler/src/dotty/tools/dotc/util/common.scala @@ -4,10 +4,9 @@ package util import core.Types.WildcardType /** Common values hoisted out for performance */ -object common { +object common: val alwaysTrue: Any => Boolean = Function.const(true) val alwaysFalse: Any => Boolean = Function.const(false) val alwaysZero: Any => Int = Function.const(0) val alwaysWildcardType: Any => WildcardType.type = Function.const(WildcardType) -} diff --git a/compiler/src/dotty/tools/dotc/util/concurrent.scala b/compiler/src/dotty/tools/dotc/util/concurrent.scala index 2710aae6c9b0..f43734bb5e6c 100644 --- a/compiler/src/dotty/tools/dotc/util/concurrent.scala +++ b/compiler/src/dotty/tools/dotc/util/concurrent.scala @@ -8,14 +8,12 @@ object concurrent: class Future[T](exec: Executor[T]): private var result: Option[Try[T]] = None - def force: Try[T] = synchronized { + def force: Try[T] = synchronized: while result.isEmpty && exec.isAlive do wait(1000 /*ms*/) result.getOrElse(Failure(NoCompletion())) - } - def complete(r: Try[T]): Unit = synchronized { + def complete(r: Try[T]): Unit = synchronized: result = Some(r) notifyAll() - } end Future class Executor[T] extends Thread: @@ -24,27 +22,24 @@ object concurrent: private var allScheduled = false private val pending = new ArrayBuffer[WorkItem] - def schedule(op: () => T): Future[T] = synchronized { + def schedule(op: () => T): Future[T] = synchronized: assert(!allScheduled) val f = Future[T](this) pending += ((f, op)) notifyAll() f - } - def close(): Unit = 
synchronized { + def close(): Unit = synchronized: allScheduled = true notifyAll() - } - private def nextPending(): Option[WorkItem] = synchronized { + private def nextPending(): Option[WorkItem] = synchronized: while pending.isEmpty && !allScheduled do wait(1000 /*ms*/) if pending.isEmpty then None else val item = pending.head pending.dropInPlace(1) Some(item) - } override def run(): Unit = while diff --git a/compiler/src/dotty/tools/io/AbstractFile.scala b/compiler/src/dotty/tools/io/AbstractFile.scala index f34fe6f40b9c..14bf2405b47a 100644 --- a/compiler/src/dotty/tools/io/AbstractFile.scala +++ b/compiler/src/dotty/tools/io/AbstractFile.scala @@ -22,7 +22,7 @@ import java.nio.file.{FileAlreadyExistsException, Files, Paths} * @author Philippe Altherr * @version 1.0, 23/03/2004 */ -object AbstractFile { +object AbstractFile: def getFile(path: String): AbstractFile = getFile(File(path)) def getDirectory(path: String): AbstractFile = getDirectory(Directory(path)) def getFile(path: JPath): AbstractFile = getFile(File(path)) @@ -55,7 +55,6 @@ object AbstractFile { else new PlainFile(new Path(Paths.get(url.toURI))) def getResources(url: URL): AbstractFile = ZipArchive fromManifestURL url -} /** *

@@ -83,7 +82,7 @@ object AbstractFile { * * ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */ -abstract class AbstractFile extends Iterable[AbstractFile] { +abstract class AbstractFile extends Iterable[AbstractFile]: /** Returns the name of this abstract file. */ def name: String @@ -108,12 +107,11 @@ abstract class AbstractFile extends Iterable[AbstractFile] { def container : AbstractFile /** Returns the underlying File if any and null otherwise. */ - def file: JFile = try { + def file: JFile = try if (jpath == null) null else jpath.toFile - } catch { + catch case _: UnsupportedOperationException => null - } /** Returns the underlying Path if any and null otherwise. */ def jpath: JPath @@ -122,9 +120,8 @@ abstract class AbstractFile extends Iterable[AbstractFile] { def underlyingSource: Option[AbstractFile] = None /** Does this abstract file denote an existing file? */ - def exists: Boolean = { + def exists: Boolean = (jpath eq null) || Files.exists(jpath) - } /** Does this abstract file represent something which can contain classfiles? */ def isClassContainer: Boolean = isDirectory || (jpath != null && (extension == "jar" || extension == "zip")) @@ -168,31 +165,27 @@ abstract class AbstractFile extends Iterable[AbstractFile] { /** Returns contents of file (if applicable) in a byte array. 
*/ @throws(classOf[IOException]) - def toByteArray: Array[Byte] = { + def toByteArray: Array[Byte] = val in = input - sizeOption match { + sizeOption match case Some(size) => var rest = size val arr = new Array[Byte](rest) - while (rest > 0) { + while (rest > 0) val res = in.read(arr, arr.length - rest, rest) if (res == -1) throw new IOException("read error") rest -= res - } in.close() arr case None => val out = new ByteArrayOutputStream() var c = in.read() - while(c != -1) { + while(c != -1) out.write(c) c = in.read() - } in.close() out.toByteArray() - } - } /** Returns all abstract subfiles of this abstract directory. */ def iterator(): Iterator[AbstractFile] @@ -225,13 +218,12 @@ abstract class AbstractFile extends Iterable[AbstractFile] { /** Return an abstract file that does not check that `path` denotes * an existing file. */ - def lookupPathUnchecked(path: String, directory: Boolean): AbstractFile = { + def lookupPathUnchecked(path: String, directory: Boolean): AbstractFile = lookup((f, p, dir) => f.lookupNameUnchecked(p, dir), path, directory) - } private def lookup(getFile: (AbstractFile, String, Boolean) => AbstractFile, path0: String, - directory: Boolean): AbstractFile = { + directory: Boolean): AbstractFile = val separator = java.io.File.separatorChar // trim trailing '/'s val path: String = if (path0.last == separator) path0 dropRight 1 else path0 @@ -239,19 +231,17 @@ abstract class AbstractFile extends Iterable[AbstractFile] { assert(length > 0 && !(path.last == separator), path) var file = this var start = 0 - while (true) { + while (true) val index = path.indexOf(separator, start) assert(index < 0 || start < index, ((path, directory, start, index))) val name = path.substring(start, if (index < 0) length else index) file = getFile(file, name, if (index < 0) directory else true) if ((file eq null) || index < 0) return file start = index + 1 - } file - } private def fileOrSubdirectoryNamed(name: String, isDir: Boolean): AbstractFile = - 
lookupName(name, isDir) match { + lookupName(name, isDir) match case null => // the optional exception may be thrown for symlinks, notably /tmp on macOS. // isDirectory tests for existing directory. The default behavior is hypothetical isDirectory(jpath, FOLLOW_LINKS). @@ -266,25 +256,22 @@ abstract class AbstractFile extends Iterable[AbstractFile] { catch case _: FileAlreadyExistsException => () new PlainFile(new File(path)) case lookup => lookup - } /** * Get the file in this directory with the given name, * creating an empty file if it does not already existing. */ - def fileNamed(name: String): AbstractFile = { + def fileNamed(name: String): AbstractFile = assert(isDirectory, "Tried to find '%s' in '%s' but it is not a directory".format(name, path)) fileOrSubdirectoryNamed(name, isDir = false) - } /** * Get the subdirectory with a given name, creating it if it * does not already exist. */ - def subdirectoryNamed(name: String): AbstractFile = { + def subdirectoryNamed(name: String): AbstractFile = assert (isDirectory, "Tried to find '%s' in '%s' but it is not a directory".format(name, path)) fileOrSubdirectoryNamed(name, isDir = true) - } protected def unsupported(): Nothing = unsupported(null) protected def unsupported(msg: String): Nothing = throw new UnsupportedOperationException(msg) @@ -292,4 +279,3 @@ abstract class AbstractFile extends Iterable[AbstractFile] { /** Returns the path of this abstract file. */ override def toString(): String = path -} diff --git a/compiler/src/dotty/tools/io/ClassPath.scala b/compiler/src/dotty/tools/io/ClassPath.scala index b45de57f9850..917c37889039 100644 --- a/compiler/src/dotty/tools/io/ClassPath.scala +++ b/compiler/src/dotty/tools/io/ClassPath.scala @@ -20,7 +20,7 @@ import dotc.classpath.{ PackageEntry, ClassPathEntries, PackageName } /** * A representation of the compiler's class- or sourcepath. 
*/ -trait ClassPath { +trait ClassPath: import dotty.tools.dotc.classpath._ def asURLs: Seq[URL] @@ -67,7 +67,7 @@ trait ClassPath { * https://github.com/sbt/sbt/blob/v0.13.15/compile/interface/src/main/scala/xsbt/CompilerInterface.scala#L249 * Jason has some improvements for that in the works (https://github.com/scala/bug/issues/10289#issuecomment-310022699) */ - def findClass(className: String): Option[ClassRepresentation] = { + def findClass(className: String): Option[ClassRepresentation] = // A default implementation which should be overridden, if we can create the more efficient // solution for a given type of ClassPath val (pkg, simpleClassName) = PackageNameUtils.separatePkgAndClassNames(className) @@ -77,7 +77,6 @@ trait ClassPath { def findClassInSources = sources(packageName).find(_.name == simpleClassName) foundClassFromClassFiles orElse findClassInSources - } /** * Returns the classfile for an external name, e.g., "java.lang.String". This method does not @@ -102,30 +101,26 @@ trait ClassPath { /** The whole sourcepath in the form of one String. 
*/ def asSourcePathString: String -} -trait EfficientClassPath extends ClassPath { +trait EfficientClassPath extends ClassPath: def list(inPackage: PackageName, onPackageEntry: PackageEntry => Unit, onClassesAndSources: ClassRepresentation => Unit): Unit - override def list(inPackage: PackageName): ClassPathEntries = { + override def list(inPackage: PackageName): ClassPathEntries = val packageBuf = collection.mutable.ArrayBuffer.empty[PackageEntry] val classRepBuf = collection.mutable.ArrayBuffer.empty[ClassRepresentation] list(inPackage, packageBuf += _, classRepBuf += _) if (packageBuf.isEmpty && classRepBuf.isEmpty) ClassPathEntries.empty else ClassPathEntries(packageBuf, classRepBuf) - } -} -trait EfficientClassPathCallBack { +trait EfficientClassPathCallBack: def packageEntry(entry: PackageEntry): Unit def classesAndSources(entry: ClassRepresentation): Unit -} -object ClassPath { +object ClassPath: val RootPackage: String = "" /** Expand single path entry */ - private def expandS(pattern: String): List[String] = { + private def expandS(pattern: String): List[String] = val wildSuffix = File.separator + "*" /* Get all subdirectories, jars, zips out of a directory. 
*/ @@ -135,15 +130,12 @@ object ClassPath { if (pattern == "*") lsDir(Directory(".")) // On Windows the JDK supports forward slash or backslash in classpath entries else if (pattern.endsWith(wildSuffix) || pattern.endsWith("/*")) lsDir(Directory(pattern dropRight 2)) - else if (pattern.contains('*')) { - try { + else if (pattern.contains('*')) + try val regexp = ("^" + pattern.replace("""\*""", """.*""") + "$").r lsDir(Directory(pattern).parent, regexp.findFirstIn(_).isDefined) - } catch { case _: PatternSyntaxException => List(pattern) } - } else List(pattern) - } /** Split classpath using platform-dependent path separator */ def split(path: String): List[String] = path.split(pathSeparator).toList.filterNot(_ == "").distinct @@ -160,17 +152,15 @@ object ClassPath { else split(path) /** Expand dir out to contents, a la extdir */ - def expandDir(extdir: String): List[String] = { - AbstractFile getDirectory extdir match { + def expandDir(extdir: String): List[String] = + AbstractFile getDirectory extdir match case null => Nil case dir => dir.filter(_.isClassContainer).map(x => new java.io.File(dir.file, x.name).getPath).toList - } - } /** Expand manifest jar classpath entries: these are either urls, or paths * relative to the location of the jar. 
*/ - def expandManifestPath(jarPath: String): List[URL] = { + def expandManifestPath(jarPath: String): List[URL] = val file = File(jarPath) if (!file.isFile) return Nil @@ -178,26 +168,23 @@ object ClassPath { new Jar(file).classPathElements map (elem => specToURL(elem) getOrElse (baseDir / elem).toURL ) - } def specToURL(spec: String): Option[URL] = try Some(new URI(spec).toURL) catch case _: MalformedURLException | _: URISyntaxException => None - def manifests: List[java.net.URL] = { + def manifests: List[java.net.URL] = import scala.jdk.CollectionConverters.EnumerationHasAsScala val resources = Thread.currentThread().getContextClassLoader().getResources("META-INF/MANIFEST.MF") resources.asScala.filter(_.getProtocol == "jar").toList - } @deprecated("shim for sbt's compiler interface", since = "2.12.0") sealed abstract class ClassPathContext @deprecated("shim for sbt's compiler interface", since = "2.12.0") sealed abstract class JavaContext -} -trait ClassRepresentation { +trait ClassRepresentation: def fileName: String def name: String def binary: Option[AbstractFile] @@ -207,11 +194,9 @@ trait ClassRepresentation { * * Used to avoid creating String instance of `name`. 
*/ - final def nameLength: Int = { + final def nameLength: Int = val ix = fileName.lastIndexOf('.') if (ix < 0) fileName.length else ix - } -} @deprecated("shim for sbt's compiler interface", since = "2.12.0") sealed abstract class DirectoryClassPath diff --git a/compiler/src/dotty/tools/io/Directory.scala b/compiler/src/dotty/tools/io/Directory.scala index 7c2067390a09..ac0e7d205f32 100644 --- a/compiler/src/dotty/tools/io/Directory.scala +++ b/compiler/src/dotty/tools/io/Directory.scala @@ -16,22 +16,20 @@ import java.nio.file.{Files, Paths} /** * ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */ -object Directory { +object Directory: import scala.util.Properties.userDir def Current: Option[Directory] = if (userDir == "") None else Some(apply(userDir).normalize) - def inTempDirectory[T](fn: Directory => T): T = { + def inTempDirectory[T](fn: Directory => T): T = val temp = Directory(Files.createTempDirectory("temp")) try fn(temp) finally temp.deleteRecursively() - } def apply(path: String): Directory = apply(Paths.get(path)) def apply(path: JPath): Directory = new Directory(path) -} /** An abstraction for directories. * @@ -40,7 +38,7 @@ object Directory { * * ''Note: This is library is considered experimental and should not be used unless you know what you are doing.'' */ -class Directory(jpath: JPath) extends Path(jpath) { +class Directory(jpath: JPath) extends Path(jpath): override def toAbsolute: Directory = if (isAbsolute) this else super.toAbsolute.toDirectory override def toDirectory: Directory = this override def toFile: File = new File(jpath) @@ -49,12 +47,11 @@ class Directory(jpath: JPath) extends Path(jpath) { /** An iterator over the contents of this directory. 
*/ def list: Iterator[Path] = - if (isDirectory) { + if (isDirectory) val fileStream = Files.list(jpath) val files = fileStream.toArray(size => new Array[JPath](size)) fileStream.close() files.iterator.map(Path.apply) - } else Iterator.empty def dirs: Iterator[Directory] = list collect { case x: Directory => x } @@ -72,4 +69,3 @@ class Directory(jpath: JPath) extends Path(jpath) { if (depth < 0) list ++ dirs.flatMap(_.deepList(depth)) else if (depth == 0) Iterator.empty else list ++ dirs.flatMap(_.deepList(depth - 1)) -} diff --git a/compiler/src/dotty/tools/io/File.scala b/compiler/src/dotty/tools/io/File.scala index 27f2c077dd6a..7860d88460ae 100644 --- a/compiler/src/dotty/tools/io/File.scala +++ b/compiler/src/dotty/tools/io/File.scala @@ -18,13 +18,12 @@ import scala.io.Codec /** * ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */ -object File { +object File: def pathSeparator: String = JavaIoFile.pathSeparator def separator: String = JavaIoFile.separator def apply(path: String)(implicit codec: Codec): File = apply(Paths.get(path)) def apply(path: JPath)(implicit codec: Codec): File = new File(path) -} /** An abstraction for files. 
For character data, a Codec * can be supplied at either creation time or when a method @@ -37,7 +36,7 @@ object File { * * ''Note: This is library is considered experimental and should not be used unless you know what you are doing.'' */ -class File(jpath: JPath)(implicit constructorCodec: Codec) extends Path(jpath) with Streamable.Chars { +class File(jpath: JPath)(implicit constructorCodec: Codec) extends Path(jpath) with Streamable.Chars: override val creationCodec: io.Codec = constructorCodec override def addExtension(ext: String): File = super.addExtension(ext).toFile @@ -75,24 +74,21 @@ class File(jpath: JPath)(implicit constructorCodec: Codec) extends Path(jpath) w def printWriter(): PrintWriter = new PrintWriter(bufferedWriter(), true) /** Creates a new file and writes all the Strings to it. */ - def writeAll(strings: String*): Unit = { + def writeAll(strings: String*): Unit = val out = bufferedWriter() try strings foreach (out write _) finally out.close() - } - def appendAll(strings: String*): Unit = { + def appendAll(strings: String*): Unit = val out = bufferedWriter(append = true) try strings foreach (out write _) finally out.close() - } /** Calls println on each string (so it adds a newline in the PrintWriter fashion.) */ - def printlnAll(strings: String*): Unit = { + def printlnAll(strings: String*): Unit = val out = printWriter() try strings foreach (out println _) finally out.close() - } def safeSlurp(): Option[String] = try Some(slurp()) @@ -100,7 +96,7 @@ class File(jpath: JPath)(implicit constructorCodec: Codec) extends Path(jpath) w /** Reflection since we're into the java 6+ API. 
*/ - def setExecutable(executable: Boolean, ownerOnly: Boolean = true): Boolean = { + def setExecutable(executable: Boolean, ownerOnly: Boolean = true): Boolean = type JBoolean = java.lang.Boolean val method = try classOf[JFile].getMethod("setExecutable", classOf[Boolean], classOf[Boolean]) @@ -108,5 +104,3 @@ class File(jpath: JPath)(implicit constructorCodec: Codec) extends Path(jpath) w try method.invoke(jpath.toFile, executable: JBoolean, ownerOnly: JBoolean).asInstanceOf[JBoolean].booleanValue catch { case _: Exception => false } - } -} diff --git a/compiler/src/dotty/tools/io/Jar.scala b/compiler/src/dotty/tools/io/Jar.scala index 9979a9ca9379..3db379d03cd2 100644 --- a/compiler/src/dotty/tools/io/Jar.scala +++ b/compiler/src/dotty/tools/io/Jar.scala @@ -36,7 +36,7 @@ import scala.annotation.tailrec // static Attributes.Name SPECIFICATION_VENDOR // static Attributes.Name SPECIFICATION_VERSION -class Jar(file: File) { +class Jar(file: File): def this(jfile: JFile) = this(File(jfile.toPath)) def this(path: String) = this(File(path)) @@ -51,103 +51,86 @@ class Jar(file: File) { /** The manifest-defined classpath String if available. 
*/ def classPathString: Option[String] = for (m <- manifest ; cp <- m.attrs.get(Name.CLASS_PATH)) yield cp - def classPathElements: List[String] = classPathString match { + def classPathElements: List[String] = classPathString match case Some(s) => s.split("\\s+").toList case _ => Nil - } - def withJarInput[T](f: JarInputStream => T): T = { + def withJarInput[T](f: JarInputStream => T): T = val in = new JarInputStream(file.inputStream()) try f(in) finally in.close() - } - def jarWriter(mainAttrs: (Attributes.Name, String)*): JarWriter = { + def jarWriter(mainAttrs: (Attributes.Name, String)*): JarWriter = new JarWriter(file, Jar.WManifest.apply(mainAttrs: _*).underlying) - } def toList: List[JarEntry] = withJarInput { in => Iterator.continually(in.getNextJarEntry()).takeWhile(_ != null).toList } - def getEntryStream(entry: JarEntry): java.io.InputStream = jarFile getInputStream entry match { + def getEntryStream(entry: JarEntry): java.io.InputStream = jarFile getInputStream entry match case null => errorFn("No such entry: " + entry) ; null case x => x - } override def toString: String = "" + file -} -class JarWriter(val file: File, val manifest: Manifest) { +class JarWriter(val file: File, val manifest: Manifest): private lazy val out = new JarOutputStream(file.outputStream(), manifest) /** Adds a jar entry for the given path and returns an output * stream to which the data should immediately be written. * This unusual interface exists to work with fjbg. 
*/ - def newOutputStream(path: String): DataOutputStream = { + def newOutputStream(path: String): DataOutputStream = val entry = new JarEntry(path) out putNextEntry entry new DataOutputStream(out) - } - def writeAllFrom(dir: Directory): Unit = { + def writeAllFrom(dir: Directory): Unit = try dir.list foreach (x => addEntry(x, "")) finally out.close() - } - def addStream(entry: JarEntry, in: InputStream): Unit = { + def addStream(entry: JarEntry, in: InputStream): Unit = out putNextEntry entry try transfer(in, out) finally out.closeEntry() - } - def addFile(file: File, prefix: String): Unit = { + def addFile(file: File, prefix: String): Unit = val entry = new JarEntry(prefix + file.name) addStream(entry, file.inputStream()) - } - def addEntry(entry: Path, prefix: String): Unit = { + def addEntry(entry: Path, prefix: String): Unit = if (entry.isFile) addFile(entry.toFile, prefix) else addDirectory(entry.toDirectory, prefix + entry.name + "/") - } - def addDirectory(entry: Directory, prefix: String): Unit = { + def addDirectory(entry: Directory, prefix: String): Unit = entry.list foreach (p => addEntry(p, prefix)) - } - private def transfer(in: InputStream, out: OutputStream) = { + private def transfer(in: InputStream, out: OutputStream) = val buf = new Array[Byte](10240) - @tailrec def loop(): Unit = in.read(buf, 0, buf.length) match { + @tailrec def loop(): Unit = in.read(buf, 0, buf.length) match case -1 => in.close() case n => out.write(buf, 0, n) ; loop() - } loop() - } def close(): Unit = out.close() -} -object Jar { +object Jar: type AttributeMap = java.util.Map[Attributes.Name, String] - object WManifest { - def apply(mainAttrs: (Attributes.Name, String)*): WManifest = { + object WManifest: + def apply(mainAttrs: (Attributes.Name, String)*): WManifest = val m = WManifest(new JManifest) for ((k, v) <- mainAttrs) m(k) = v m - } - } - implicit class WManifest(val manifest: JManifest) { + implicit class WManifest(val manifest: JManifest): for ((k, v) <- 
initialMainAttrs) this(k) = v def underlying: JManifest = manifest def attrs: mutable.Map[Name, String] = manifest.getMainAttributes().asInstanceOf[AttributeMap].asScala withDefaultValue null - def initialMainAttrs: Map[Attributes.Name, String] = { + def initialMainAttrs: Map[Attributes.Name, String] = import scala.util.Properties._ Map( Name.MANIFEST_VERSION -> "1.0", ScalaCompilerVersion -> versionNumberString ) - } def apply(name: Attributes.Name): String = attrs(name) def apply(name: String): String = apply(new Attributes.Name(name)) @@ -156,7 +139,6 @@ object Jar { def mainClass: String = apply(Name.MAIN_CLASS) def mainClass_=(value: String): Option[String] = update(Name.MAIN_CLASS, value) - } // See http://download.java.net/jdk7/docs/api/java/nio/file/Path.html // for some ideas. @@ -167,8 +149,6 @@ object Jar { def isJarOrZip(f: Path, examineFile: Boolean): Boolean = f.hasExtension("zip", "jar") || (examineFile && magicNumberIsZip(f)) - def create(file: File, sourceDir: Directory, mainClass: String): Unit = { + def create(file: File, sourceDir: Directory, mainClass: String): Unit = val writer = new Jar(file).jarWriter(Name.MAIN_CLASS -> mainClass) writer writeAllFrom sourceDir - } -} diff --git a/compiler/src/dotty/tools/io/JarArchive.scala b/compiler/src/dotty/tools/io/JarArchive.scala index 8f3b5d8010e4..e3c247bf9b6b 100644 --- a/compiler/src/dotty/tools/io/JarArchive.scala +++ b/compiler/src/dotty/tools/io/JarArchive.scala @@ -10,35 +10,29 @@ import scala.jdk.CollectionConverters._ * This class implements an [[AbstractFile]] backed by a jar * that be can used as the compiler's output directory. 
*/ -class JarArchive private (root: Directory) extends PlainDirectory(root) { +class JarArchive private (root: Directory) extends PlainDirectory(root): def close(): Unit = jpath.getFileSystem().close() def allFileNames(): Iterator[String] = java.nio.file.Files.walk(jpath).iterator().asScala.map(_.toString) -} -object JarArchive { +object JarArchive: /** Create a new jar file. Overwrite if file already exists */ - def create(path: Path): JarArchive = { + def create(path: Path): JarArchive = require(path.extension == "jar") path.delete() open(path, create = true) - } /** Create a jar file. */ - def open(path: Path, create: Boolean = false): JarArchive = { + def open(path: Path, create: Boolean = false): JarArchive = require(path.extension == "jar") // creating a new zip file system by using the JAR URL syntax: // https://docs.oracle.com/javase/7/docs/technotes/guides/io/fsp/zipfilesystemprovider.html val env = Map("create" -> create.toString).asJava val uri = java.net.URI.create("jar:" + path.toAbsolute.toURI.toString) - val fs = { + val fs = try FileSystems.newFileSystem(uri, env) - catch { + catch case _: FileSystemAlreadyExistsException => FileSystems.getFileSystem(uri) - } - } val root = fs.getRootDirectories().iterator.next() new JarArchive(Directory(root)) - } -} diff --git a/compiler/src/dotty/tools/io/NoAbstractFile.scala b/compiler/src/dotty/tools/io/NoAbstractFile.scala index 13c2c6851d2b..abe2e13f04d7 100644 --- a/compiler/src/dotty/tools/io/NoAbstractFile.scala +++ b/compiler/src/dotty/tools/io/NoAbstractFile.scala @@ -14,7 +14,7 @@ import java.io.InputStream * * ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */ -object NoAbstractFile extends AbstractFile { +object NoAbstractFile extends AbstractFile: def absolute: AbstractFile = this def container: AbstractFile = this def create(): Unit = ??? 
@@ -32,4 +32,3 @@ object NoAbstractFile extends AbstractFile { def path: String = "" override def toByteArray: Array[Byte] = Array[Byte]() override def toString: String = "" -} diff --git a/compiler/src/dotty/tools/io/Path.scala b/compiler/src/dotty/tools/io/Path.scala index dddb870afc65..83e79beed6c9 100644 --- a/compiler/src/dotty/tools/io/Path.scala +++ b/compiler/src/dotty/tools/io/Path.scala @@ -31,20 +31,18 @@ import scala.util.Random.alphanumeric * * ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */ -object Path { +object Path: def isExtensionJarOrZip(jpath: JPath): Boolean = isExtensionJarOrZip(jpath.getFileName.toString) - def isExtensionJarOrZip(name: String): Boolean = { + def isExtensionJarOrZip(name: String): Boolean = val ext = extension(name) ext == "jar" || ext == "zip" - } - def extension(name: String): String = { + def extension(name: String): String = var i = name.length - 1 while (i >= 0 && name.charAt(i) != '.') i -= 1 if (i < 0) "" else name.substring(i + 1).toLowerCase - } def onlyDirs(xs: Iterator[Path]): Iterator[Directory] = xs.filter(_.isDirectory).map(_.toDirectory) def onlyDirs(xs: List[Path]): List[Directory] = xs.filter(_.isDirectory).map(_.toDirectory) @@ -53,16 +51,15 @@ object Path { def roots: List[Path] = FileSystems.getDefault.getRootDirectories.iterator().asScala.map(Path.apply).toList def apply(path: String): Path = apply(new java.io.File(path).toPath) - def apply(jpath: JPath): Path = try { + def apply(jpath: JPath): Path = try if (Files.isRegularFile(jpath)) new File(jpath) else if (Files.isDirectory(jpath)) new Directory(jpath) else new Path(jpath) - } catch { case ex: SecurityException => new Path(jpath) } + catch { case ex: SecurityException => new Path(jpath) } /** Avoiding any shell/path issues by only using alphanumerics. 
*/ private[io] def randomPrefix: String = alphanumeric take 6 mkString "" private[io] def fail(msg: String): Nothing = throw FileOperationException(msg) -} import Path._ /** The Path constructor is private so we can enforce some @@ -70,7 +67,7 @@ import Path._ * * ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */ -class Path private[io] (val jpath: JPath) { +class Path private[io] (val jpath: JPath): val separator: Char = java.io.File.separatorChar val separatorStr: String = java.io.File.separator @@ -113,10 +110,9 @@ class Path private[io] (val jpath: JPath) { def walk: Iterator[Path] = walkFilter(_ => true) // identity - def name: String = jpath.getFileName() match { + def name: String = jpath.getFileName() match case null => "" case name => name.toString - } def path: String = jpath.toString def normalize: Path = new Path(jpath.normalize) @@ -128,7 +124,7 @@ class Path private[io] (val jpath: JPath) { /** * @return The path of the parent directory, or root if path is already root */ - def parent: Directory = { + def parent: Directory = // We don't call JPath#normalize here because it may result in resolving // to a different path than intended, such as when the given path contains // a `..` component and the preceding name is a symbolic link. @@ -155,18 +151,15 @@ class Path private[io] (val jpath: JPath) { else Directory(".") case x => Directory(x) - } - def parents: List[Directory] = { + def parents: List[Directory] = val p = parent if (p isSame this) Nil else p :: p.parents - } // if name ends with an extension (e.g. "foo.jpg") returns the extension ("jpg"), otherwise "" def extension: String = Path.extension(name) // compares against extensions in a CASE INSENSITIVE way. 
- def hasExtension(ext: String, exts: String*): Boolean = { + def hasExtension(ext: String, exts: String*): Boolean = val lower = extension.toLowerCase ext.toLowerCase == lower || exts.exists(_.toLowerCase == lower) - } // returns the filename without the extension. def stripExtension: String = name stripSuffix ("." + extension) // returns the Path with the extension. @@ -202,19 +195,17 @@ class Path private[io] (val jpath: JPath) { def isFresher(other: Path): Boolean = lastModified.compareTo(other.lastModified) > 0 // creations - def createDirectory(force: Boolean = true, failIfExists: Boolean = false): Directory = { + def createDirectory(force: Boolean = true, failIfExists: Boolean = false): Directory = val res = tryCreate(if (force) Files.createDirectories(jpath) else Files.createDirectory(jpath)) if (!res && failIfExists && exists) fail("Directory '%s' already exists." format name) else if (isDirectory) toDirectory else new Directory(jpath) - } - def createFile(failIfExists: Boolean = false): File = { + def createFile(failIfExists: Boolean = false): File = val res = tryCreate(Files.createFile(jpath)) Files.createFile(jpath) if (!res && failIfExists && exists) fail("File '%s' already exists." format name) else if (isFile) toFile else new File(jpath) - } private def tryCreate(create: => JPath): Boolean = try { create; true } catch { case _: FileAlreadyExistsException => false } @@ -226,36 +217,29 @@ class Path private[io] (val jpath: JPath) { /** Deletes the path recursively. Returns false on failure. * Use with caution! 
*/ - def deleteRecursively(): Boolean = { + def deleteRecursively(): Boolean = if (!exists) false - else { + else Files.walkFileTree(jpath, new SimpleFileVisitor[JPath]() { - override def visitFile(file: JPath, attrs: BasicFileAttributes) = { + override def visitFile(file: JPath, attrs: BasicFileAttributes) = Files.delete(file) FileVisitResult.CONTINUE - } - override def postVisitDirectory(dir: JPath, exc: IOException) = { + override def postVisitDirectory(dir: JPath, exc: IOException) = Files.delete(dir) FileVisitResult.CONTINUE - } }) true - } - } def truncate(): Boolean = - isFile && { + isFile `&&`: val raf = new RandomAccessFile(jpath.toFile, "rw") raf setLength 0 raf.close() length == 0 - } override def toString(): String = path - override def equals(other: Any): Boolean = other match { + override def equals(other: Any): Boolean = other match case x: Path => path == x.path case _ => false - } override def hashCode(): Int = path.hashCode() -} diff --git a/compiler/src/dotty/tools/io/PlainFile.scala b/compiler/src/dotty/tools/io/PlainFile.scala index 898e037488f7..10ef134733ae 100644 --- a/compiler/src/dotty/tools/io/PlainFile.scala +++ b/compiler/src/dotty/tools/io/PlainFile.scala @@ -12,46 +12,42 @@ import java.io.{InputStream, OutputStream} import java.nio.file.{InvalidPathException, Paths} /** ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */ -class PlainDirectory(givenPath: Directory) extends PlainFile(givenPath) { +class PlainDirectory(givenPath: Directory) extends PlainFile(givenPath): override def isDirectory: Boolean = true override def iterator(): Iterator[PlainFile] = givenPath.list.filter(_.exists).map(new PlainFile(_)) override def delete(): Unit = givenPath.deleteRecursively() -} /** This class implements an abstract file backed by a File. 
* * ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */ -class PlainFile(val givenPath: Path) extends AbstractFile { +class PlainFile(val givenPath: Path) extends AbstractFile: assert(path ne null) dotc.util.Stats.record("new PlainFile") def jpath: JPath = givenPath.jpath - override def underlyingSource = { + override def underlyingSource = val fileSystem = jpath.getFileSystem - fileSystem.provider().getScheme match { + fileSystem.provider().getScheme match case "jar" => val fileStores = fileSystem.getFileStores.iterator() - if (fileStores.hasNext) { + if (fileStores.hasNext) val jarPath = fileStores.next().name - try { + try Some(new PlainFile(new Path(Paths.get(jarPath.stripSuffix(fileSystem.getSeparator))))) - } catch { + catch case _: InvalidPathException => None - } - } else None + else None case "jrt" => - if (jpath.getNameCount > 2 && jpath.startsWith("/modules")) { + if (jpath.getNameCount > 2 && jpath.startsWith("/modules")) // TODO limit this to OpenJDK based JVMs? val moduleName = jpath.getName(1) Some(new PlainFile(new Path(Paths.get(System.getProperty("java.home"), "jmods", moduleName.toString + ".jmod")))) - } else None + else None case _ => None - } - } /** Returns the name of this abstract file. */ @@ -72,10 +68,9 @@ class PlainFile(val givenPath: Path) extends AbstractFile { override def sizeOption: Option[Int] = Some(givenPath.length.toInt) override def hashCode(): Int = System.identityHashCode(absolutePath) - override def equals(that: Any): Boolean = that match { + override def equals(that: Any): Boolean = that match case x: PlainFile => absolutePath `eq` x.absolutePath case _ => false - } /** Is this abstract file a directory? */ def isDirectory: Boolean = givenPath.isDirectory @@ -84,15 +79,13 @@ class PlainFile(val givenPath: Path) extends AbstractFile { def lastModified: Long = givenPath.lastModified.toMillis /** Returns all abstract subfiles of this abstract directory. 
*/ - def iterator: Iterator[AbstractFile] = { + def iterator: Iterator[AbstractFile] = // Optimization: Assume that the file was not deleted and did not have permissions changed // between the call to `list` and the iteration. This saves a call to `exists`. - def existsFast(path: Path) = path match { + def existsFast(path: Path) = path match case (_: Directory | _: File) => true case _ => path.exists - } givenPath.toDirectory.list.filter(existsFast).map(new PlainFile(_)) - } /** * Returns the abstract file in this abstract directory with the @@ -100,11 +93,10 @@ class PlainFile(val givenPath: Path) extends AbstractFile { * argument "directory" tells whether to look for a directory or * or a regular file. */ - def lookupName(name: String, directory: Boolean): AbstractFile = { + def lookupName(name: String, directory: Boolean): AbstractFile = val child = givenPath / name if ((child.isDirectory && directory) || (child.isFile && !directory)) new PlainFile(child) else null - } /** Does this abstract file denote an existing file? */ def create(): Unit = if (!exists) givenPath.createFile() @@ -119,9 +111,7 @@ class PlainFile(val givenPath: Path) extends AbstractFile { */ def lookupNameUnchecked(name: String, directory: Boolean): AbstractFile = new PlainFile(givenPath / name) -} -object PlainFile { +object PlainFile: extension (jPath: JPath) def toPlainFile = new PlainFile(new Path(jPath)) -} diff --git a/compiler/src/dotty/tools/io/Streamable.scala b/compiler/src/dotty/tools/io/Streamable.scala index 328ce03f7853..f0755248f93a 100644 --- a/compiler/src/dotty/tools/io/Streamable.scala +++ b/compiler/src/dotty/tools/io/Streamable.scala @@ -21,7 +21,7 @@ import Path.fail * * ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */ -object Streamable { +object Streamable: /** Traits which can be viewed as a sequence of bytes. 
Source types * which know their length should override def length: Long for more * efficient method implementations. @@ -32,21 +32,20 @@ object Streamable { * Several methods (such as `chars` and `lines`) create InputStreams they * don't close */ - trait Bytes { + trait Bytes: def inputStream(): InputStream def length: Long = -1 def bufferedInput(): BufferedInputStream = new BufferedInputStream(inputStream()) def bytes(): Iterator[Byte] = bytesAsInts() map (_.toByte) - def bytesAsInts(): Iterator[Int] = { + def bytesAsInts(): Iterator[Int] = val in = bufferedInput() Iterator continually in.read() takeWhile (_ != -1) - } /** This method aspires to be the fastest way to read * a stream of known length into memory. */ - def toByteArray(): Array[Byte] = { + def toByteArray(): Array[Byte] = // if we don't know the length, fall back on relative inefficiency if (length == -1L) return (new ArrayBuffer[Byte]() ++= bytes()).toArray @@ -56,28 +55,23 @@ object Streamable { lazy val in = bufferedInput() var offset = 0 - def loop(): Unit = { - if (offset < len) { + def loop(): Unit = + if (offset < len) val read = in.read(arr, offset, len - offset) - if (read >= 0) { + if (read >= 0) offset += read loop() - } - } - } try loop() finally in.close() if (offset == arr.length) arr else fail("Could not read entire source (%d of %d bytes)".format(offset, len)) - } - } /** For objects which can be viewed as Chars. * * ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */ - trait Chars extends Bytes { + trait Chars extends Bytes: /** Calls to methods requiring byte<->char transformations should be offered * in a form which allows specifying the codec. When it is not specified, * the one discovered at creation time will be used, which will always find the @@ -106,20 +100,17 @@ object Streamable { /** Creates a BufferedReader and applies the closure, automatically closing it on completion. 
*/ - def applyReader[T](f: BufferedReader => T): T = { + def applyReader[T](f: BufferedReader => T): T = val in = bufferedReader() try f(in) finally in.close() - } /** Convenience function to import entire file into a String. */ def slurp(): String = slurp(creationCodec) - def slurp(codec: Codec): String = { + def slurp(codec: Codec): String = val src = chars(codec) try src.mkString finally src.close() // Always Be Closing - } - } /** Call a function on something Closeable, finally closing it. */ def closing[T <: JCloseable, U](stream: T)(f: T => U): U = @@ -136,4 +127,3 @@ object Streamable { def slurp(url: URL)(implicit codec: Codec): String = slurp(url.openStream()) -} diff --git a/compiler/src/dotty/tools/io/VirtualDirectory.scala b/compiler/src/dotty/tools/io/VirtualDirectory.scala index 157f63a2ac1a..2d44404f386f 100644 --- a/compiler/src/dotty/tools/io/VirtualDirectory.scala +++ b/compiler/src/dotty/tools/io/VirtualDirectory.scala @@ -16,12 +16,11 @@ import java.io.{InputStream, OutputStream} * ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */ class VirtualDirectory(val name: String, maybeContainer: Option[VirtualDirectory] = None) -extends AbstractFile { +extends AbstractFile: def path: String = - maybeContainer match { + maybeContainer match case None => name case Some(parent) => parent.path + '/' + name - } def absolute: AbstractFile = this @@ -55,20 +54,16 @@ extends AbstractFile { (files get name filter (_.isDirectory == directory)).orNull override def fileNamed(name: String): AbstractFile = - Option(lookupName(name, directory = false)) getOrElse { + Option(lookupName(name, directory = false)) getOrElse: val newFile = new VirtualFile(name, s"$path/$name") files(name) = newFile newFile - } override def subdirectoryNamed(name: String): AbstractFile = - Option(lookupName(name, directory = true)) getOrElse { + Option(lookupName(name, directory = true)) getOrElse: val dir = new 
VirtualDirectory(name, Some(this)) files(name) = dir dir - } - def clear(): Unit = { + def clear(): Unit = files.clear() - } -} diff --git a/compiler/src/dotty/tools/io/VirtualFile.scala b/compiler/src/dotty/tools/io/VirtualFile.scala index 9d290a9b0e6a..531b5f176998 100644 --- a/compiler/src/dotty/tools/io/VirtualFile.scala +++ b/compiler/src/dotty/tools/io/VirtualFile.scala @@ -16,7 +16,7 @@ import java.io.{ ByteArrayInputStream, ByteArrayOutputStream, InputStream, Outpu * * ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */ -class VirtualFile(val name: String, override val path: String) extends AbstractFile { +class VirtualFile(val name: String, override val path: String) extends AbstractFile: /** * Initializes this instance with the specified name and an @@ -35,10 +35,9 @@ class VirtualFile(val name: String, override val path: String) extends AbstractF * @param content the initial contents of the virtual file * @return the created virtual file */ - def this(path: String, content: Array[Byte]) = { + def this(path: String, content: Array[Byte]) = this(VirtualFile.nameOf(path), path) this.content = content - } private var content = Array.emptyByteArray @@ -51,14 +50,11 @@ class VirtualFile(val name: String, override val path: String) extends AbstractF def input : InputStream = new ByteArrayInputStream(content) - override def output: OutputStream = { - new ByteArrayOutputStream() { - override def close() = { + override def output: OutputStream = + new ByteArrayOutputStream(): + override def close() = super.close() content = toByteArray() - } - } - } def container: AbstractFile = NoAbstractFile @@ -77,10 +73,9 @@ class VirtualFile(val name: String, override val path: String) extends AbstractF def lastModified: Long = 0 /** Returns all abstract subfiles of this abstract directory. 
*/ - def iterator: Iterator[AbstractFile] = { + def iterator: Iterator[AbstractFile] = assert(isDirectory, "not a directory '" + this + "'") Iterator.empty - } /** Does this abstract file denote an existing file? */ def create(): Unit = unsupported() @@ -94,16 +89,14 @@ class VirtualFile(val name: String, override val path: String) extends AbstractF * argument "directory" tells whether to look for a directory or * or a regular file. */ - def lookupName(name: String, directory: Boolean): AbstractFile = { + def lookupName(name: String, directory: Boolean): AbstractFile = assert(isDirectory, "not a directory '" + this + "'") null - } /** Returns an abstract file with the given name. It does not * check that it exists. */ def lookupNameUnchecked(name: String, directory: Boolean): AbstractFile = unsupported() -} object VirtualFile: private def nameOf(path: String): String = val i = path.lastIndexOf('/') diff --git a/compiler/src/dotty/tools/io/ZipArchive.scala b/compiler/src/dotty/tools/io/ZipArchive.scala index 4383bc187979..4dc6efd84aec 100644 --- a/compiler/src/dotty/tools/io/ZipArchive.scala +++ b/compiler/src/dotty/tools/io/ZipArchive.scala @@ -25,7 +25,7 @@ import scala.jdk.CollectionConverters._ * * ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */ -object ZipArchive { +object ZipArchive: private[io] val closeZipFile: Boolean = sys.props.get("scala.classpath.closeZip").exists(_.toBoolean) /** @@ -39,7 +39,7 @@ object ZipArchive { private def dirName(path: String) = splitPath(path, front = true) private def baseName(path: String) = splitPath(path, front = false) - private def splitPath(path0: String, front: Boolean): String = { + private def splitPath(path0: String, front: Boolean): String = val isDir = path0.charAt(path0.length - 1) == '/' val path = if (isDir) path0.substring(0, path0.length - 1) else path0 val idx = path.lastIndexOf('/') @@ -50,11 +50,9 @@ object ZipArchive { else if (front) 
path.substring(0, idx + 1) else path.substring(idx + 1) - } -} import ZipArchive._ /** ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */ -abstract class ZipArchive(override val jpath: JPath, release: Option[String]) extends AbstractFile with Equals { +abstract class ZipArchive(override val jpath: JPath, release: Option[String]) extends AbstractFile with Equals: self => override def underlyingSource: Option[ZipArchive] = Some(this) @@ -68,24 +66,21 @@ abstract class ZipArchive(override val jpath: JPath, release: Option[String]) ex def absolute: AbstractFile = unsupported() /** ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */ - sealed abstract class Entry(path: String, val parent: Entry) extends VirtualFile(baseName(path), path) { + sealed abstract class Entry(path: String, val parent: Entry) extends VirtualFile(baseName(path), path): // have to keep this name for compat with sbt's compiler-interface def getArchive: ZipFile = null override def underlyingSource: Option[ZipArchive] = Some(self) override def toString: String = self.path + "(" + path + ")" - } /** ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */ - class DirEntry(path: String, parent: Entry) extends Entry(path, parent) { + class DirEntry(path: String, parent: Entry) extends Entry(path, parent): val entries: mutable.HashMap[String, Entry] = mutable.HashMap() override def isDirectory: Boolean = true override def iterator: Iterator[Entry] = entries.valuesIterator - override def lookupName(name: String, directory: Boolean): Entry = { + override def lookupName(name: String, directory: Boolean): Entry = if (directory) entries.get(name + "/").orNull else entries.get(name).orNull - } - } private def ensureDir(dirs: mutable.Map[String, DirEntry], path: String): DirEntry = //OPT inlined from getOrElseUpdate; saves ~50K closures on 
test run. @@ -96,7 +91,7 @@ abstract class ZipArchive(override val jpath: JPath, release: Option[String]) ex // parent.entries(baseName(path)) = dir // dir // }) - dirs get path match { + dirs get path match case Some(v) => v case None => val parent = ensureDir(dirs, dirName(path)) @@ -104,46 +99,38 @@ abstract class ZipArchive(override val jpath: JPath, release: Option[String]) ex parent.entries(baseName(path)) = dir dirs(path) = dir dir - } - protected def getDir(dirs: mutable.Map[String, DirEntry], entry: ZipEntry): DirEntry = { + protected def getDir(dirs: mutable.Map[String, DirEntry], entry: ZipEntry): DirEntry = if (entry.isDirectory) ensureDir(dirs, entry.getName) else ensureDir(dirs, dirName(entry.getName)) - } def close(): Unit -} /** ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */ -final class FileZipArchive(jpath: JPath, release: Option[String]) extends ZipArchive(jpath, release) { - private def openZipFile(): ZipFile = try { - release match { +final class FileZipArchive(jpath: JPath, release: Option[String]) extends ZipArchive(jpath, release): + private def openZipFile(): ZipFile = try + release match case Some(r) if file.getName.endsWith(".jar") => val releaseVersion = JDK9Reflectors.runtimeVersionParse(r) JDK9Reflectors.newJarFile(file, true, ZipFile.OPEN_READ, releaseVersion) case _ => new ZipFile(file) - } - } catch { + catch case ioe: IOException => throw new IOException("Error accessing " + file.getPath, ioe) - } private class LazyEntry( name: String, time: Long, size: Int, parent: DirEntry - ) extends Entry(name, parent) { + ) extends Entry(name, parent): override def lastModified: Long = time // could be stale - override def input: InputStream = { + override def input: InputStream = val zipFile = openZipFile() val entry = zipFile.getEntry(name) // with `-release`, returns the correct version under META-INF/versions val `delegate` = zipFile.getInputStream(entry) - new 
FilterInputStream(`delegate`) { + new FilterInputStream(`delegate`): override def close(): Unit = { zipFile.close() } - } - } override def sizeOption: Option[Int] = Some(size) // could be stale - } // keeps a file handle open to ZipFile, which forbids file mutation // on Windows, and leaks memory on all OS (typically by stopping @@ -153,28 +140,27 @@ final class FileZipArchive(jpath: JPath, release: Option[String]) extends ZipArc zipFile: ZipFile, zipEntry: ZipEntry, parent: DirEntry - ) extends Entry(zipEntry.getName, parent) { + ) extends Entry(zipEntry.getName, parent): override def lastModified: Long = zipEntry.getTime override def input: InputStream = zipFile.getInputStream(zipEntry) override def sizeOption: Option[Int] = Some(zipEntry.getSize.toInt) - } - @volatile lazy val (root, allDirs): (DirEntry, collection.Map[String, DirEntry]) = { + @volatile lazy val (root, allDirs): (DirEntry, collection.Map[String, DirEntry]) = val root = new DirEntry("/", null) val dirs = mutable.HashMap[String, DirEntry]("/" -> root) val zipFile = openZipFile() val entries = zipFile.entries() - try { - while (entries.hasMoreElements) { + try + while (entries.hasMoreElements) val zipEntry = entries.nextElement - if (!zipEntry.getName.startsWith("META-INF/versions/")) { - val zipEntryVersioned = if (release.isDefined) { + if (!zipEntry.getName.startsWith("META-INF/versions/")) + val zipEntryVersioned = if (release.isDefined) // JARFile will return the entry for the corresponding release-dependent version here under META-INF/versions zipFile.getEntry(zipEntry.getName) - } else zipEntry + else zipEntry - if (!zipEntry.isDirectory) { + if (!zipEntry.isDirectory) val dir = getDir(dirs, zipEntry) val f = if (ZipArchive.closeZipFile) @@ -188,15 +174,10 @@ final class FileZipArchive(jpath: JPath, release: Option[String]) extends ZipArc new LeakyEntry(zipFile, zipEntryVersioned, dir) dir.entries(f.name) = f - } - } - } - } finally { + finally if (ZipArchive.closeZipFile) zipFile.close() 
else closeables ::= zipFile - } (root, dirs) - } def iterator: Iterator[Entry] = root.iterator @@ -208,20 +189,17 @@ final class FileZipArchive(jpath: JPath, release: Option[String]) extends ZipArc override def sizeOption: Option[Int] = Some(Files.size(jpath).toInt) override def canEqual(other: Any): Boolean = other.isInstanceOf[FileZipArchive] override def hashCode(): Int = jpath.hashCode - override def equals(that: Any): Boolean = that match { + override def equals(that: Any): Boolean = that match case x: FileZipArchive => jpath.toAbsolutePath == x.jpath.toAbsolutePath case _ => false - } private[this] var closeables: List[java.io.Closeable] = Nil - override def close(): Unit = { + override def close(): Unit = closeables.foreach(_.close) closeables = Nil - } -} -final class ManifestResources(val url: URL) extends ZipArchive(null, None) { - def iterator(): Iterator[AbstractFile] = { +final class ManifestResources(val url: URL) extends ZipArchive(null, None): + def iterator(): Iterator[AbstractFile] = val root = new DirEntry("/", null) val dirs = mutable.HashMap[String, DirEntry]("/" -> root) val stream = input @@ -230,29 +208,24 @@ final class ManifestResources(val url: URL) extends ZipArchive(null, None) { closeables ::= stream - for (zipEntry <- iter) { + for (zipEntry <- iter) val dir = getDir(dirs, zipEntry) - if (!zipEntry.isDirectory) { - class FileEntry() extends Entry(zipEntry.getName, dir) { + if (!zipEntry.isDirectory) + class FileEntry() extends Entry(zipEntry.getName, dir): override def lastModified = zipEntry.getTime() override def input = resourceInputStream(this.path) override def sizeOption = None - } val f = new FileEntry() dir.entries(f.name) = f - } - } try root.iterator finally dirs.clear() - } def name: String = path - def path: String = { + def path: String = val s = url.getPath val n = s.lastIndexOf('!') s.substring(0, n) - } def input: InputStream = url.openStream() def lastModified: Long = try url.openConnection().getLastModified() @@ 
-260,29 +233,22 @@ final class ManifestResources(val url: URL) extends ZipArchive(null, None) { override def canEqual(other: Any): Boolean = other.isInstanceOf[ManifestResources] override def hashCode(): Int = url.hashCode - override def equals(that: Any): Boolean = that match { + override def equals(that: Any): Boolean = that match case x: ManifestResources => url == x.url case _ => false - } - private def resourceInputStream(path: String): InputStream = { - new FilterInputStream(null) { - override def read(): Int = { + private def resourceInputStream(path: String): InputStream = + new FilterInputStream(null): + override def read(): Int = if (in == null) in = Thread.currentThread().getContextClassLoader().getResourceAsStream(path) if (in == null) throw new RuntimeException(path + " not found") super.read() - } - override def close(): Unit = { + override def close(): Unit = super.close() in = null - } - } - } private[this] var closeables: List[java.io.Closeable] = Nil - override def close(): Unit = { + override def close(): Unit = closeables.foreach(_.close()) closeables = Nil - } -} diff --git a/compiler/src/dotty/tools/io/package.scala b/compiler/src/dotty/tools/io/package.scala index 19bb6e4ad19b..7dabd790722a 100644 --- a/compiler/src/dotty/tools/io/package.scala +++ b/compiler/src/dotty/tools/io/package.scala @@ -5,8 +5,7 @@ package dotty.tools -package object io { +package object io: type JManifest = java.util.jar.Manifest type JFile = java.io.File type JPath = java.nio.file.Path -} diff --git a/compiler/src/dotty/tools/package.scala b/compiler/src/dotty/tools/package.scala index f90355b1fa8e..9ce2a4d652b9 100644 --- a/compiler/src/dotty/tools/package.scala +++ b/compiler/src/dotty/tools/package.scala @@ -1,6 +1,6 @@ package dotty -package object tools { +package object tools: val ListOfNil: List[Nil.type] = Nil :: Nil @@ -27,11 +27,10 @@ package object tools { inline def nen(y: T | Null): Boolean = !eqn(y) - object resultWrapper { + object resultWrapper: 
opaque type WrappedResult[T] = T private[tools] def unwrap[T](x: WrappedResult[T]): T = x private[tools] def wrap[T](x: T): WrappedResult[T] = x - } type WrappedResult[T] = resultWrapper.WrappedResult[T] def WrappedResult[T](x: T) = resultWrapper.wrap(x) def result[T](using x: WrappedResult[T]): T = resultWrapper.unwrap(x) @@ -50,4 +49,3 @@ package object tools { // when handling stack overflows and every operation (including class loading) // risks failing. dotty.tools.dotc.core.handleRecursive - } diff --git a/compiler/src/dotty/tools/repl/AbstractFileClassLoader.scala b/compiler/src/dotty/tools/repl/AbstractFileClassLoader.scala index 7a457a1d7546..33c98c1d344d 100644 --- a/compiler/src/dotty/tools/repl/AbstractFileClassLoader.scala +++ b/compiler/src/dotty/tools/repl/AbstractFileClassLoader.scala @@ -30,32 +30,27 @@ class AbstractFileClassLoader(val root: AbstractFile, parent: ClassLoader) exten findAbstractFile(name) match case null => null case file => new URL(null, s"memory:${file.path}", new URLStreamHandler { - override def openConnection(url: URL): URLConnection = new URLConnection(url) { - override def connect() = () - override def getInputStream = file.input - } - }) + override def openConnection(url: URL): URLConnection = new URLConnection(url): + override def connect() = () + override def getInputStream = file.input + }) override protected def findResources(name: String) = findResource(name) match case null => Collections.enumeration(Collections.emptyList[URL]) //Collections.emptyEnumeration[URL] case url => Collections.enumeration(Collections.singleton(url)) - override def findClass(name: String): Class[?] = { + override def findClass(name: String): Class[?] 
= var file: AbstractFile = root val pathParts = name.split("[./]").toList - for (dirPart <- pathParts.init) { + for (dirPart <- pathParts.init) file = file.lookupName(dirPart, true) - if (file == null) { + if (file == null) throw new ClassNotFoundException(name) - } - } file = file.lookupName(pathParts.last+".class", false) - if (file == null) { + if (file == null) throw new ClassNotFoundException(name) - } val bytes = file.toByteArray defineClass(name, bytes, 0, bytes.length) - } override def loadClass(name: String): Class[?] = try findClass(name) catch case _: ClassNotFoundException => super.loadClass(name) end AbstractFileClassLoader diff --git a/compiler/src/dotty/tools/repl/CollectTopLevelImports.scala b/compiler/src/dotty/tools/repl/CollectTopLevelImports.scala index d539c1986f8d..9cb9cb1fd9d8 100644 --- a/compiler/src/dotty/tools/repl/CollectTopLevelImports.scala +++ b/compiler/src/dotty/tools/repl/CollectTopLevelImports.scala @@ -9,7 +9,7 @@ import dotty.tools.dotc.core.Phases.Phase * These imports must be collected as typed trees and therefore * after Typer. 
*/ -class CollectTopLevelImports extends Phase { +class CollectTopLevelImports extends Phase: import tpd._ def phaseName: String = "collectTopLevelImports" @@ -17,13 +17,10 @@ class CollectTopLevelImports extends Phase { private var myImports: List[Import] = _ def imports: List[Import] = myImports - def run(using Context): Unit = { - def topLevelImports(tree: Tree) = { + def run(using Context): Unit = + def topLevelImports(tree: Tree) = val PackageDef(_, _ :: TypeDef(_, rhs: Template) :: _) = tree: @unchecked rhs.body.collect { case tree: Import => tree } - } val tree = ctx.compilationUnit.tpdTree myImports = topLevelImports(tree) - } -} diff --git a/compiler/src/dotty/tools/repl/JLineTerminal.scala b/compiler/src/dotty/tools/repl/JLineTerminal.scala index 8e048d786ae1..c27a8993f189 100644 --- a/compiler/src/dotty/tools/repl/JLineTerminal.scala +++ b/compiler/src/dotty/tools/repl/JLineTerminal.scala @@ -16,7 +16,7 @@ import org.jline.reader.impl.history.DefaultHistory import org.jline.terminal.TerminalBuilder import org.jline.utils.AttributedString -class JLineTerminal extends java.io.Closeable { +class JLineTerminal extends java.io.Closeable: // import java.util.logging.{Logger, Level} // Logger.getLogger("org.jline").setLevel(Level.FINEST) @@ -49,7 +49,7 @@ class JLineTerminal extends java.io.Closeable { */ def readLine( completer: Completer // provide auto-completions - )(using Context): String = { + )(using Context): String = import LineReader.Option._ import LineReader._ val userHome = System.getProperty("user.home") @@ -73,22 +73,19 @@ class JLineTerminal extends java.io.Closeable { .build() lineReader.readLine(prompt) - } def close(): Unit = terminal.close() /** Provide syntax highlighting */ - private class Highlighter(using Context) extends reader.Highlighter { - def highlight(reader: LineReader, buffer: String): AttributedString = { + private class Highlighter(using Context) extends reader.Highlighter: + def highlight(reader: LineReader, buffer: String): 
AttributedString = val highlighted = SyntaxHighlighting.highlight(buffer) AttributedString.fromAnsi(highlighted) - } def setErrorPattern(errorPattern: java.util.regex.Pattern): Unit = {} def setErrorIndex(errorIndex: Int): Unit = {} - } /** Provide multi-line editing support */ - private class Parser(using Context) extends reader.Parser { + private class Parser(using Context) extends reader.Parser: /** * @param cursor The cursor position within the line @@ -98,13 +95,12 @@ class JLineTerminal extends java.io.Closeable { */ private class ParsedLine( val cursor: Int, val line: String, val word: String, val wordCursor: Int - ) extends reader.ParsedLine { + ) extends reader.ParsedLine: // Using dummy values, not sure what they are used for def wordIndex = -1 def words = java.util.Collections.emptyList[String] - } - def parse(input: String, cursor: Int, context: ParseContext): reader.ParsedLine = { + def parse(input: String, cursor: Int, context: ParseContext): reader.ParsedLine = def parsedLine(word: String, wordCursor: Int) = new ParsedLine(cursor, input, word, wordCursor) // Used when no word is being completed @@ -118,12 +114,12 @@ class JLineTerminal extends java.io.Closeable { /* missing = */ newLinePrompt) case class TokenData(token: Token, start: Int, end: Int) - def currentToken: TokenData /* | Null */ = { + def currentToken: TokenData /* | Null */ = val source = SourceFile.virtual("", input) val scanner = new Scanner(source)(using ctx.fresh.setReporter(Reporter.NoReporter)) var lastBacktickErrorStart: Option[Int] = None - while (scanner.token != EOF) { + while (scanner.token != EOF) val start = scanner.offset val token = scanner.token scanner.nextToken() @@ -139,16 +135,13 @@ class JLineTerminal extends java.io.Closeable { lastBacktickErrorStart = Some(start) else lastBacktickErrorStart = None - } null - } - def acceptLine = { + def acceptLine = val onLastLine = !input.substring(cursor).contains(System.lineSeparator) onLastLine && 
!ParseResult.isIncomplete(input) - } - context match { + context match case ParseContext.ACCEPT_LINE if acceptLine => // using dummy values, resulting parsed input is probably unused defaultParsedLine @@ -164,18 +157,13 @@ class JLineTerminal extends java.io.Closeable { case ParseContext.COMPLETE => // Parse to find completions (typically after a Tab). def isCompletable(token: Token) = isIdentifier(token) || isKeyword(token) - currentToken match { + currentToken match case TokenData(token, start, end) if isCompletable(token) => val word = input.substring(start, end) val wordCursor = cursor - start parsedLine(word, wordCursor) case _ => defaultParsedLine - } case _ => incomplete() - } - } - } -} diff --git a/compiler/src/dotty/tools/repl/Main.scala b/compiler/src/dotty/tools/repl/Main.scala index 7eb906edc586..a48c20934fcf 100644 --- a/compiler/src/dotty/tools/repl/Main.scala +++ b/compiler/src/dotty/tools/repl/Main.scala @@ -2,7 +2,6 @@ package dotty.tools.repl /** Main entry point to the REPL */ // To test, run bin/scala -object Main { +object Main: def main(args: Array[String]): Unit = new ReplDriver(args).tryRunning -} diff --git a/compiler/src/dotty/tools/repl/ParseResult.scala b/compiler/src/dotty/tools/repl/ParseResult.scala index a67b247066f7..a9935548d1c2 100644 --- a/compiler/src/dotty/tools/repl/ParseResult.scala +++ b/compiler/src/dotty/tools/repl/ParseResult.scala @@ -48,9 +48,8 @@ case class AmbiguousCommand(cmd: String, matchingCommands: List[String]) extends * the REPL */ case class Load(path: String) extends Command -object Load { +object Load: val command: String = ":load" -} /** To find out the type of an expression you may simply do: * @@ -60,47 +59,41 @@ object Load { * ``` */ case class TypeOf(expr: String) extends Command -object TypeOf { +object TypeOf: val command: String = ":type" -} /** * A command that is used to display the documentation associated with * the given expression. 
*/ case class DocOf(expr: String) extends Command -object DocOf { +object DocOf: val command: String = ":doc" -} /** `:imports` lists the imports that have been explicitly imported during the * session */ -case object Imports extends Command { +case object Imports extends Command: val command: String = ":imports" -} case class Settings(arg: String) extends Command -object Settings { +object Settings: val command: String = ":settings" -} /** Reset the session to the initial state from when the repl program was * started */ case class Reset(arg: String) extends Command -object Reset { +object Reset: val command: String = ":reset" -} /** `:quit` exits the repl */ -case object Quit extends Command { +case object Quit extends Command: val command: String = ":quit" val alias: String = ":exit" -} /** `:help` shows the different commands implemented by the Dotty repl */ -case object Help extends Command { +case object Help extends Command: val command: String = ":help" val text: String = """The REPL has several commands available: @@ -114,18 +107,16 @@ case object Help extends Command { |:reset [options] reset the repl to its initial state, forgetting all session entries |:settings update compiler options, if possible """.stripMargin -} -object ParseResult { +object ParseResult: @sharable private val CommandExtract = """(:[\S]+)\s*(.*)""".r - private def parseStats(using Context): List[untpd.Tree] = { + private def parseStats(using Context): List[untpd.Tree] = val parser = new Parser(ctx.source) val stats = parser.blockStatSeq() parser.accept(Tokens.EOF) stats - } private[repl] val commands: List[(String, String => ParseResult)] = List( Quit.command -> (_ => Quit), @@ -139,20 +130,19 @@ object ParseResult { Settings.command -> (arg => Settings(arg)), ) - def apply(source: SourceFile)(using state: State): ParseResult = { + def apply(source: SourceFile)(using state: State): ParseResult = val sourceCode = source.content().mkString - sourceCode match { + sourceCode match case 
"" => Newline case CommandExtract(cmd, arg) => { - val matchingCommands = commands.filter((command, _) => command.startsWith(cmd)) - matchingCommands match { - case Nil => UnknownCommand(cmd) - case (_, f) :: Nil => f(arg) - case multiple => AmbiguousCommand(cmd, multiple.map(_._1)) + val matchingCommands = commands.filter((command, _) => command.startsWith(cmd)) + matchingCommands match + case Nil => UnknownCommand(cmd) + case (_, f) :: Nil => f(arg) + case multiple => AmbiguousCommand(cmd, multiple.map(_._1)) } - } case _ => - inContext(state.context) { + inContext(state.context): val reporter = newStoreReporter val stats = parseStats(using state.context.fresh.setReporter(reporter).withSource(source)) @@ -163,9 +153,6 @@ object ParseResult { stats) else Parsed(source, stats, reporter) - } - } - } def apply(sourceCode: String)(using state: State): ParseResult = maybeIncomplete(sourceCode, maybeIncomplete = true) @@ -182,20 +169,17 @@ object ParseResult { * having to evaluate the expression. 
*/ def isIncomplete(sourceCode: String)(using Context): Boolean = - sourceCode match { + sourceCode match case CommandExtract(_) | "" => false case _ => { - val reporter = newStoreReporter - val source = SourceFile.virtual("", sourceCode, maybeIncomplete = true) - val unit = CompilationUnit(source, mustExist = false) - val localCtx = ctx.fresh + val reporter = newStoreReporter + val source = SourceFile.virtual("", sourceCode, maybeIncomplete = true) + val unit = CompilationUnit(source, mustExist = false) + val localCtx = ctx.fresh .setCompilationUnit(unit) .setReporter(reporter) - var needsMore = false - reporter.withIncompleteHandler((_, _) => needsMore = true) { - parseStats(using localCtx) + var needsMore = false + reporter.withIncompleteHandler((_, _) => needsMore = true): + parseStats(using localCtx) + !reporter.hasErrors && needsMore } - !reporter.hasErrors && needsMore - } - } -} diff --git a/compiler/src/dotty/tools/repl/Rendering.scala b/compiler/src/dotty/tools/repl/Rendering.scala index c647ef302bb9..abdf99f031d3 100644 --- a/compiler/src/dotty/tools/repl/Rendering.scala +++ b/compiler/src/dotty/tools/repl/Rendering.scala @@ -32,8 +32,8 @@ private[repl] class Rendering(parentClassLoader: Option[ClassLoader] = None): /** Class loader used to load compiled code */ private[repl] def classLoader()(using Context) = if (myClassLoader != null && myClassLoader.root == ctx.settings.outputDir.value) myClassLoader - else { - val parent = Option(myClassLoader).orElse(parentClassLoader).getOrElse { + else + val parent = Option(myClassLoader).orElse(parentClassLoader).getOrElse: val compilerClasspath = ctx.platform.classPath(using ctx).asURLs // We can't use the system classloader as a parent because it would // pollute the user classpath with everything passed to the JVM @@ -43,10 +43,9 @@ private[repl] class Rendering(parentClassLoader: Option[ClassLoader] = None): // which should correspond to the platform classloader on Java 9+. 
val baseClassLoader = ClassLoader.getSystemClassLoader.getParent new java.net.URLClassLoader(compilerClasspath.toArray, baseClassLoader) - } myClassLoader = new AbstractFileClassLoader(ctx.settings.outputDir.value, parent) - myReplStringOf = { + myReplStringOf = // We need to use the ScalaRunTime class coming from the scala-library // on the user classpath, and not the one available in the current // classloader, so we use reflection instead of simply calling @@ -54,17 +53,15 @@ private[repl] class Rendering(parentClassLoader: Option[ClassLoader] = None): // For old API, try to clean up extraneous newlines by stripping suffix and maybe prefix newline. val scalaRuntime = Class.forName("scala.runtime.ScalaRunTime", true, myClassLoader) val renderer = "stringOf" - def stringOfMaybeTruncated(value: Object, maxElements: Int): String = { - try { + def stringOfMaybeTruncated(value: Object, maxElements: Int): String = + try val meth = scalaRuntime.getMethod(renderer, classOf[Object], classOf[Int], classOf[Boolean]) val truly = java.lang.Boolean.TRUE meth.invoke(null, value, maxElements, truly).asInstanceOf[String] - } catch { + catch case _: NoSuchMethodException => val meth = scalaRuntime.getMethod(renderer, classOf[Object], classOf[Int]) meth.invoke(null, value, maxElements).asInstanceOf[String] - } - } (value: Object, maxElements: Int, maxCharacters: Int) => { // `ScalaRuntime.stringOf` may truncate the output, in which case we want to indicate that fact to the user @@ -80,9 +77,7 @@ private[repl] class Rendering(parentClassLoader: Option[ClassLoader] = None): else s"$maybeTruncated ... 
large output truncated, print value to show all" } - } myClassLoader - } private[repl] def truncate(str: String, maxPrintCharacters: Int)(using ctx: Context): String = val ncp = str.codePointCount(0, str.length) // to not cut inside code point @@ -185,11 +180,10 @@ object Rendering: final val REPL_WRAPPER_NAME_PREFIX = str.REPL_SESSION_LINE extension (s: Symbol) - def showUser(using Context): String = { + def showUser(using Context): String = val printer = new ReplPrinter(ctx) val text = printer.dclText(s) text.mkString(ctx.settings.pageWidth.value, ctx.settings.printLines.value) - } def rootCause(x: Throwable): Throwable = x match case _: ExceptionInInitializerError | diff --git a/compiler/src/dotty/tools/repl/ReplCompiler.scala b/compiler/src/dotty/tools/repl/ReplCompiler.scala index 764695e8479b..5e488d11dcc2 100644 --- a/compiler/src/dotty/tools/repl/ReplCompiler.scala +++ b/compiler/src/dotty/tools/repl/ReplCompiler.scala @@ -41,13 +41,13 @@ class ReplCompiler extends Compiler: ) def newRun(initCtx: Context, state: State): Run = - val run = new Run(this, initCtx) { + val run = new Run(this, initCtx): /** Import previous runs and user defined imports */ - override protected def rootContext(using Context): Context = { + override protected def rootContext(using Context): Context = def importContext(imp: tpd.Import)(using Context) = ctx.importContext(imp, imp.symbol) - def importPreviousRun(id: Int)(using Context) = { + def importPreviousRun(id: Int)(using Context) = // we first import the wrapper object id val path = nme.EMPTY_PACKAGE ++ "." 
++ ReplCompiler.objectNames(id) val ctx0 = ctx.fresh @@ -59,15 +59,12 @@ class ReplCompiler extends Compiler: if imports.isEmpty then ctx0 else imports.foldLeft(ctx0.fresh.setNewScope)((ctx, imp) => importContext(imp)(using ctx)) - } val rootCtx = super.rootContext.fresh .withRootImports .fresh.setOwner(defn.EmptyPackageClass): Context (state.validObjectIndexes).foldLeft(rootCtx)((ctx, id) => importPreviousRun(id)(using ctx)) - } - } run.suppressions.initSuspendedMessages(state.context.run) run end newRun @@ -95,17 +92,16 @@ class ReplCompiler extends Compiler: final def typeOf(expr: String)(using state: State): Result[String] = typeCheck(expr).map { tree => given Context = state.context - tree.rhs match { + tree.rhs match case Block(xs, _) => xs.last.tpe.widen.show case _ => """Couldn't compute the type of your expression, so sorry :( | |Please report this to my masters at github.com/lampepfl/dotty """.stripMargin - } } - def docOf(expr: String)(using state: State): Result[String] = inContext(state.context) { + def docOf(expr: String)(using state: State): Result[String] = inContext(state.context): /** Extract the "selected" symbol from `tree`. * @@ -120,20 +116,18 @@ class ReplCompiler extends Compiler: * This function returns the right symbol for the received expression, and all the symbols that are * overridden. 
*/ - def extractSymbols(tree: tpd.Tree): Iterator[Symbol] = { - val sym = tree match { + def extractSymbols(tree: tpd.Tree): Iterator[Symbol] = + val sym = tree match case tree if tree.isInstantiation => tree.symbol.owner case tpd.closureDef(defdef) => defdef.rhs.symbol case _ => tree.symbol - } Iterator(sym) ++ sym.allOverriddenSymbols - } - typeCheck(expr).map { + typeCheck(expr).map: case ValDef(_, _, Block(stats, _)) if stats.nonEmpty => val stat = stats.last.asInstanceOf[tpd.Tree] if (stat.tpe.isError) stat.tpe.show - else { + else val symbols = extractSymbols(stat) val doc = for { sym <- symbols @@ -142,20 +136,17 @@ class ReplCompiler extends Compiler: if (doc.hasNext) doc.next() else s"// No doc for `$expr`" - } case _ => """Couldn't display the documentation for your expression, so sorry :( | |Please report this to my masters at github.com/lampepfl/dotty """.stripMargin - } - } - final def typeCheck(expr: String, errorsAllowed: Boolean = false)(using state: State): Result[tpd.ValDef] = { + final def typeCheck(expr: String, errorsAllowed: Boolean = false)(using state: State): Result[tpd.ValDef] = - def wrapped(expr: String, sourceFile: SourceFile, state: State)(using Context): Result[untpd.PackageDef] = { - def wrap(trees: List[untpd.Tree]): untpd.PackageDef = { + def wrapped(expr: String, sourceFile: SourceFile, state: State)(using Context): Result[untpd.PackageDef] = + def wrap(trees: List[untpd.Tree]): untpd.PackageDef = import untpd._ val valdef = ValDef("expr".toTermName, TypeTree(), Block(trees, unitLiteral).withSpan(Span(0, expr.length))) @@ -164,9 +155,8 @@ class ReplCompiler extends Compiler: .withMods(Modifiers(Final)) .withSpan(Span(0, expr.length)) PackageDef(Ident(nme.EMPTY_PACKAGE), List(wrapper)) - } - ParseResult(sourceFile) match { + ParseResult(sourceFile) match case Parsed(_, trees, _) => wrap(trees).result case SyntaxErrors(_, reported, trees) => @@ -177,32 +167,28 @@ class ReplCompiler extends Compiler: s"Couldn't parse 
'$expr' to valid scala", sourceFile.atSpan(Span(0, expr.length)) ) - ).errors - } - } + ).errors - def unwrapped(tree: tpd.Tree, sourceFile: SourceFile)(using Context): Result[tpd.ValDef] = { + def unwrapped(tree: tpd.Tree, sourceFile: SourceFile)(using Context): Result[tpd.ValDef] = def error: Result[tpd.ValDef] = List(new Diagnostic.Error(s"Invalid scala expression", sourceFile.atSpan(Span(0, sourceFile.content.length)))).errors import tpd._ - tree match { + tree match case PackageDef(_, List(TypeDef(_, tmpl: Template))) => tmpl.body .collectFirst { case dd: ValDef if dd.name.show == "expr" => dd.result } .getOrElse(error) case _ => error - } - } val src = SourceFile.virtual("", expr) inContext(state.context.fresh .setReporter(newStoreReporter) .setSetting(state.context.settings.YstopAfter, List("typer")) - ) { + ): wrapped(expr, src, state).flatMap { pkg => val unit = CompilationUnit(src) unit.untpdTree = pkg @@ -213,8 +199,6 @@ class ReplCompiler extends Compiler: else ctx.reporter.removeBufferedMessages.errors } - } - } object ReplCompiler: val ReplState: Property.StickyKey[State] = Property.StickyKey() val objectNames = mutable.Map.empty[Int, TermName] @@ -246,13 +230,12 @@ class ReplPhase extends Phase: def run(using Context): Unit = ctx.compilationUnit.untpdTree match case pkg @ PackageDef(_, stats) => - pkg.getAttachment(ReplCompiler.ReplState).foreach { + pkg.getAttachment(ReplCompiler.ReplState).foreach: case given State => val defs = definitions(stats) val res = wrapped(defs, Span(0, stats.last.span.end)) res.putAttachment(ReplCompiler.ReplState, defs.state) ctx.compilationUnit.untpdTree = res - } case _ => end run @@ -262,13 +245,12 @@ class ReplPhase extends Phase: import untpd.* // If trees is of the form `{ def1; def2; def3 }` then `List(def1, def2, def3)` - val flattened = trees match { + val flattened = trees match case List(Block(stats, expr)) => if (expr eq EmptyTree) stats // happens when expr is not an expression else stats :+ expr case _ => 
trees - } val state = summon[State] var valIdx = state.valIndex @@ -281,11 +263,11 @@ class ReplPhase extends Phase: case tuple: Tuple => for t <- tuple.trees do maybeBumpValIdx(t) case patDef: PatDef => for p <- patDef.pats do maybeBumpValIdx(p) case tree: NameTree => tree.name.show.stripPrefix(str.REPL_RES_PREFIX).toIntOption match - case Some(n) if n >= valIdx => valIdx = n + 1 - case _ => + case Some(n) if n >= valIdx => valIdx = n + 1 + case _ => case _ => - flattened.foreach { + flattened.foreach: case expr @ Assign(id: Ident, _) => // special case simple reassignment (e.g. x = 3) // in order to print the new value in the REPL @@ -300,7 +282,6 @@ class ReplPhase extends Phase: case other => maybeBumpValIdx(other) defs += other - } Definitions(defs.toList, state.copy(objectIndex = state.objectIndex + 1, valIndex = valIdx)) end definitions diff --git a/compiler/src/dotty/tools/repl/ReplDriver.scala b/compiler/src/dotty/tools/repl/ReplDriver.scala index 905f4f06de08..a45a07fc93bd 100644 --- a/compiler/src/dotty/tools/repl/ReplDriver.scala +++ b/compiler/src/dotty/tools/repl/ReplDriver.scala @@ -78,25 +78,22 @@ class ReplDriver(settings: Array[String], override def sourcesRequired: Boolean = false /** Create a fresh and initialized context with IDE mode enabled */ - private def initialCtx(settings: List[String]) = { + private def initialCtx(settings: List[String]) = val rootCtx = initCtx.fresh.addMode(Mode.ReadPositions | Mode.Interactive) rootCtx.setSetting(rootCtx.settings.YcookComments, true) rootCtx.setSetting(rootCtx.settings.YreadComments, true) setupRootCtx(this.settings ++ settings, rootCtx) - } - private def setupRootCtx(settings: Array[String], rootCtx: Context) = { + private def setupRootCtx(settings: Array[String], rootCtx: Context) = setup(settings, rootCtx) match - case Some((files, ictx)) => inContext(ictx) { - shouldStart = true - if files.nonEmpty then out.println(i"Ignoring spurious arguments: $files%, %") - ictx.base.initialize() - ictx - } + 
case Some((files, ictx)) => inContext(ictx): + shouldStart = true + if files.nonEmpty then out.println(i"Ignoring spurious arguments: $files%, %") + ictx.base.initialize() + ictx case None => shouldStart = false rootCtx - } /** the initial, empty state of the REPL session */ final def initialState: State = State(0, 0, Map.empty, Set.empty, rootCtx) @@ -107,14 +104,13 @@ class ReplDriver(settings: Array[String], * such, when the user enters `:reset` this method should be called to reset * everything properly */ - protected def resetToInitial(settings: List[String] = Nil): Unit = { + protected def resetToInitial(settings: List[String] = Nil): Unit = rootCtx = initialCtx(settings) if (rootCtx.settings.outputDir.isDefault(using rootCtx)) rootCtx = rootCtx.fresh .setSetting(rootCtx.settings.outputDir, new VirtualDirectory("")) compiler = new ReplCompiler rendering = new Rendering(classLoader) - } private var rootCtx: Context = _ private var shouldStart: Boolean = _ @@ -139,7 +135,7 @@ class ReplDriver(settings: Array[String], * observable outside of the CLI, for this reason, most helper methods are * `protected final` to facilitate testing. */ - def runUntilQuit(using initialState: State = initialState)(): State = { + def runUntilQuit(using initialState: State = initialState)(): State = val terminal = new JLineTerminal out.println( @@ -147,40 +143,34 @@ class ReplDriver(settings: Array[String], |Type in expressions for evaluation. 
Or try :help.""".stripMargin) /** Blockingly read a line, getting back a parse result */ - def readLine()(using state: State): ParseResult = { + def readLine()(using state: State): ParseResult = val completer: Completer = { (_, line, candidates) => val comps = completions(line.cursor, line.line, state) candidates.addAll(comps.asJava) } given Context = state.context - try { + try val line = terminal.readLine(completer) ParseResult(line) - } catch { + catch case _: EndOfFileException | _: UserInterruptException => // Ctrl+D or Ctrl+C Quit - } - } - @tailrec def loop(using state: State)(): State = { + @tailrec def loop(using state: State)(): State = val res = readLine() if (res == Quit) state else loop(using interpret(res))() - } try runBody { loop() } finally terminal.close() - } - final def run(input: String)(using state: State): State = runBody { + final def run(input: String)(using state: State): State = runBody: interpret(ParseResult.complete(input)) - } - final def runQuietly(input: String)(using State): State = runBody { + final def runQuietly(input: String)(using State): State = runBody: val parsed = ParseResult(input) interpret(parsed, quiet = true) - } protected def runBody(body: => State): State = rendering.classLoader()(using rootCtx).asContext(withRedirectedOutput(body)) @@ -201,26 +191,22 @@ class ReplDriver(settings: Array[String], protected def redirectOutput: Boolean = true // redirecting the output allows us to test `println` in scripted tests - private def withRedirectedOutput(op: => State): State = { + private def withRedirectedOutput(op: => State): State = if redirectOutput then val savedOut = System.out val savedErr = System.err - try { + try System.setOut(out) System.setErr(out) op - } - finally { + finally System.setOut(savedOut) System.setErr(savedErr) - } else op - } - private def newRun(state: State, reporter: StoreReporter = newStoreReporter) = { + private def newRun(state: State, reporter: StoreReporter = newStoreReporter) = val run = 
compiler.newRun(rootCtx.fresh.setReporter(reporter), state) state.copy(context = run.runContext) - } private def stripBackTicks(label: String) = if label.startsWith("`") && label.endsWith("`") then @@ -230,7 +216,7 @@ class ReplDriver(settings: Array[String], /** Extract possible completions at the index of `cursor` in `expr` */ protected final def completions(cursor: Int, expr: String, state0: State): List[Candidate] = - def makeCandidate(label: String) = { + def makeCandidate(label: String) = new Candidate( /* value = */ label, @@ -241,12 +227,10 @@ class ReplDriver(settings: Array[String], /* key = */ null, /* complete = */ false // if true adds space when completing ) - } if expr.startsWith(":") then - ParseResult.commands.collect { + ParseResult.commands.collect: case command if command._1.startsWith(expr) => makeCandidate(command._1) - } else given state: State = newRun(state0) compiler @@ -263,8 +247,8 @@ class ReplDriver(settings: Array[String], .getOrElse(Nil) end completions - protected def interpret(res: ParseResult, quiet: Boolean = false)(using state: State): State = { - res match { + protected def interpret(res: ParseResult, quiet: Boolean = false)(using state: State): State = + res match case parsed: Parsed if parsed.trees.nonEmpty => compile(parsed, state, quiet) @@ -280,15 +264,12 @@ class ReplDriver(settings: Array[String], case _ => // new line, empty tree state - } - } /** Compile `parsed` trees and evolve `state` in accordance */ - private def compile(parsed: Parsed, istate: State, quiet: Boolean = false): State = { - def extractNewestWrapper(tree: untpd.Tree): Name = tree match { + private def compile(parsed: Parsed, istate: State, quiet: Boolean = false): State = + def extractNewestWrapper(tree: untpd.Tree): Name = tree match case PackageDef(_, (obj: untpd.ModuleDef) :: Nil) => obj.name.moduleClassName case _ => nme.NO_NAME - } def extractTopLevelImports(ctx: Context): List[tpd.Import] = unfusedPhases(using ctx).collectFirst { case phase: 
CollectTopLevelImports => phase.imports }.get @@ -299,10 +280,9 @@ class ReplDriver(settings: Array[String], imports.foldLeft(ctx.fresh.setNewScope)((ctx, imp) => ctx.importContext(imp, imp.symbol(using ctx))) - given State = { + given State = val state0 = newRun(istate, parsed.reporter) state0.copy(context = state0.context.withSource(parsed.source)) - } compiler .compile(parsed) .fold( @@ -322,7 +302,7 @@ class ReplDriver(settings: Array[String], val warnings = newState.context.reporter .removeBufferedMessages(using newState.context) - inContext(newState.context) { + inContext(newState.context): val (updatedState, definitions) = if (!ctx.settings.XreplDisableDisplay.value) renderDefinitions(unit.tpdTree, newestWrapper)(using newStateWithImports) @@ -335,40 +315,35 @@ class ReplDriver(settings: Array[String], given Ordering[Diagnostic] = Ordering[(Int, Int, Int)].on(d => (d.pos.line, -d.level, d.pos.column)) - if (!quiet) { + if (!quiet) (definitions ++ warnings) .sorted .foreach(printDiagnostic) - } updatedState - } } ) - } - private def renderDefinitions(tree: tpd.Tree, newestWrapper: Name)(using state: State): (State, Seq[Diagnostic]) = { + private def renderDefinitions(tree: tpd.Tree, newestWrapper: Name)(using state: State): (State, Seq[Diagnostic]) = given Context = state.context - def resAndUnit(denot: Denotation) = { + def resAndUnit(denot: Denotation) = import scala.util.{Success, Try} val sym = denot.symbol val name = sym.name.show - val hasValidNumber = Try(name.drop(3).toInt) match { + val hasValidNumber = Try(name.drop(3).toInt) match case Success(num) => num < state.valIndex case _ => false - } name.startsWith(str.REPL_RES_PREFIX) && hasValidNumber && sym.info == defn.UnitType - } - def extractAndFormatMembers(symbol: Symbol): (State, Seq[Diagnostic]) = if (tree.symbol.info.exists) { + def extractAndFormatMembers(symbol: Symbol): (State, Seq[Diagnostic]) = if (tree.symbol.info.exists) val info = symbol.info val defs = info.bounds.hi.finalResultType 
.membersBasedOnFlags(required = Method, excluded = Accessor | ParamAccessor | Synthetic | Private) .filterNot { denot => defn.topClasses.contains(denot.symbol.owner) || denot.symbol.isConstructor - || denot.symbol.name.is(DefaultGetterName) + || denot.symbol.name.is(DefaultGetterName) } val vals = @@ -406,19 +381,17 @@ class ReplDriver(settings: Array[String], ++ renderedVals val diagnostics = if formattedMembers.isEmpty then rendering.forceModule(symbol) else formattedMembers (state.copy(valIndex = state.valIndex - vals.count(resAndUnit)), diagnostics) - } else (state, Seq.empty) def isSyntheticCompanion(sym: Symbol) = sym.is(Module) && sym.is(Synthetic) def typeDefs(sym: Symbol): Seq[Diagnostic] = sym.info.memberClasses - .collect { + .collect: case x if !isSyntheticCompanion(x.symbol) && !x.symbol.name.isReplWrapperName => rendering.renderTypeDef(x) - } - atPhase(typerPhase.next) { + atPhase(typerPhase.next): // Display members of wrapped module: tree.symbol.info.memberClasses .find(_.symbol.name == newestWrapper.moduleClassName) @@ -431,15 +404,12 @@ class ReplDriver(settings: Array[String], .map(d => new Diagnostic(d.msg.mapMsg(SyntaxHighlighting.highlight), d.pos, d.level)) (newState, highlighted) } - .getOrElse { + .getOrElse: // user defined a trait/class/object, so no module needed (state, Seq.empty) - } - } - } /** Interpret `cmd` to action and propagate potentially new `state` */ - private def interpretCommand(cmd: Command)(using state: State): State = cmd match { + private def interpretCommand(cmd: Command)(using state: State): State = cmd match case UnknownCommand(cmd) => out.println(s"""Unknown command: "$cmd", run ":help" for a list of commands""") state @@ -474,65 +444,58 @@ class ReplDriver(settings: Array[String], case Load(path) => val file = new JFile(path) - if (file.exists) { + if (file.exists) val contents = Using(scala.io.Source.fromFile(file, StandardCharsets.UTF_8.name))(_.mkString).get run(contents) - } - else { + else 
out.println(s"""Couldn't find file "${file.getCanonicalPath}"""") state - } case TypeOf(expr) => - expr match { + expr match case "" => out.println(s":type ") case _ => compiler.typeOf(expr)(using newRun(state)).fold( displayErrors, res => out.println(res) // result has some highlights ) - } state case DocOf(expr) => - expr match { + expr match case "" => out.println(s":doc ") case _ => compiler.docOf(expr)(using newRun(state)).fold( displayErrors, res => out.println(res) ) - } state case Settings(arg) => arg match - case "" => - given ctx: Context = state.context - for (s <- ctx.settings.userSetSettings(ctx.settingsState).sortBy(_.name)) - out.println(s"${s.name} = ${if s.value == "" then "\"\"" else s.value}") - state - case _ => - rootCtx = setupRootCtx(tokenize(arg).toArray, rootCtx) - state.copy(context = rootCtx) + case "" => + given ctx: Context = state.context + for (s <- ctx.settings.userSetSettings(ctx.settingsState).sortBy(_.name)) + out.println(s"${s.name} = ${if s.value == "" then "\"\"" else s.value}") + state + case _ => + rootCtx = setupRootCtx(tokenize(arg).toArray, rootCtx) + state.copy(context = rootCtx) case Quit => // end of the world! state - } /** shows all errors nicely formatted */ - private def displayErrors(errs: Seq[Diagnostic])(using state: State): State = { + private def displayErrors(errs: Seq[Diagnostic])(using state: State): State = errs.foreach(printDiagnostic) state - } /** Like ConsoleReporter, but without file paths, -Xprompt displaying, * and using a PrintStream rather than a PrintWriter so messages aren't re-encoded. 
*/ - private object ReplConsoleReporter extends ConsoleReporter.AbstractConsoleReporter { + private object ReplConsoleReporter extends ConsoleReporter.AbstractConsoleReporter: override def posFileStr(pos: SourcePosition) = "" // omit file paths override def printMessage(msg: String): Unit = out.println(msg) override def flush()(using Context): Unit = out.flush() - } /** Print warnings & errors using ReplConsoleReporter, and info straight to out */ private def printDiagnostic(dia: Diagnostic)(using state: State) = dia.level match diff --git a/compiler/src/dotty/tools/repl/ScriptEngine.scala b/compiler/src/dotty/tools/repl/ScriptEngine.scala index 7d385daa43e4..58f6340cb188 100644 --- a/compiler/src/dotty/tools/repl/ScriptEngine.scala +++ b/compiler/src/dotty/tools/repl/ScriptEngine.scala @@ -18,7 +18,7 @@ import dotc.core.StdNames.str * val e = m.getEngineByName("scala") * println(e.eval("42")) */ -class ScriptEngine extends AbstractScriptEngine { +class ScriptEngine extends AbstractScriptEngine: private val driver = new ReplDriver( Array( "-classpath", "", // Avoid the default "." @@ -35,7 +35,7 @@ class ScriptEngine extends AbstractScriptEngine { /* Evaluate with the given context. 
*/ @throws[ScriptException] - def eval(script: String, context: ScriptContext): Object = { + def eval(script: String, context: ScriptContext): Object = val vid = state.valIndex state = driver.run(script)(using state) val oid = state.objectIndex @@ -43,30 +43,26 @@ class ScriptEngine extends AbstractScriptEngine { .getDeclaredMethods.find(_.getName == s"${str.REPL_RES_PREFIX}$vid") .map(_.invoke(null)) .getOrElse(null) - } @throws[ScriptException] def eval(reader: Reader, context: ScriptContext): Object = eval(stringFromReader(reader), context) private val buffer = new Array[Char](8192) - def stringFromReader(in: Reader) = { + def stringFromReader(in: Reader) = val out = new StringWriter var n = in.read(buffer) - while (n > -1) { + while (n > -1) out.write(buffer, 0, n) n = in.read(buffer) - } in.close out.toString - } -} -object ScriptEngine { +object ScriptEngine: import java.util.Arrays import scala.util.Properties - class Factory extends ScriptEngineFactory { + class Factory extends ScriptEngineFactory: def getEngineName = "Scala REPL" def getEngineVersion = "3.0" def getExtensions = Arrays.asList("scala") @@ -79,17 +75,14 @@ object ScriptEngine { def getOutputStatement(toDisplay: String) = s"""print("$toDisplay")""" - def getParameter(key: String): Object = key match { + def getParameter(key: String): Object = key match case JScriptEngine.ENGINE => getEngineName case JScriptEngine.ENGINE_VERSION => getEngineVersion case JScriptEngine.LANGUAGE => getLanguageName case JScriptEngine.LANGUAGE_VERSION => getLanguageVersion case JScriptEngine.NAME => getNames.get(0) case _ => null - } def getProgram(statements: String*) = statements.mkString("; ") def getScriptEngine: JScriptEngine = new ScriptEngine - } -} diff --git a/compiler/src/dotty/tools/repl/package.scala b/compiler/src/dotty/tools/repl/package.scala index b780d877e57a..67f7e3fc4169 100644 --- a/compiler/src/dotty/tools/repl/package.scala +++ b/compiler/src/dotty/tools/repl/package.scala @@ -2,9 +2,8 @@ 
package dotty.tools import dotc.reporting.{HideNonSensicalMessages, StoreReporter, UniqueMessagePositions} -package object repl { +package object repl: /** Create empty outer store reporter */ private[repl] def newStoreReporter: StoreReporter = new StoreReporter(null) with UniqueMessagePositions with HideNonSensicalMessages -} diff --git a/compiler/src/dotty/tools/repl/results.scala b/compiler/src/dotty/tools/repl/results.scala index 96d733975a4e..6b20026fb1a4 100644 --- a/compiler/src/dotty/tools/repl/results.scala +++ b/compiler/src/dotty/tools/repl/results.scala @@ -6,7 +6,7 @@ import dotc.reporting.Diagnostic /** Contains the different data and type structures used to model results * in the REPL */ -object results { +object results: /** Type alias for `List[Diagnostic]` */ type Errors = List[Diagnostic] @@ -20,4 +20,3 @@ object results { extension [A](xs: Errors) def errors: Result[A] = scala.util.Left(xs) -} diff --git a/compiler/src/dotty/tools/runner/ObjectRunner.scala b/compiler/src/dotty/tools/runner/ObjectRunner.scala index cb8f9d791dfa..7b56766ae910 100644 --- a/compiler/src/dotty/tools/runner/ObjectRunner.scala +++ b/compiler/src/dotty/tools/runner/ObjectRunner.scala @@ -10,7 +10,7 @@ import java.util.concurrent.ExecutionException /** * This is a copy implementation from scala/scala scala.tools.nsc.CommonRunner trait */ -trait CommonRunner { +trait CommonRunner: /** Run a given object, specified by name, using a * specified classpath and argument list. 
* @@ -18,10 +18,9 @@ trait CommonRunner { * @throws java.lang.NoSuchMethodException * @throws java.lang.reflect.InvocationTargetException */ - def run(urls: Seq[URL], objectName: String, arguments: Seq[String]): Unit = { + def run(urls: Seq[URL], objectName: String, arguments: Seq[String]): Unit = import RichClassLoader._ ScalaClassLoader.fromURLsParallelCapable(urls).run(objectName, arguments) - } /** Catches any non-fatal exception thrown by run (in the case of InvocationTargetException, * unwrapping it) and returns it in an Option. @@ -30,7 +29,7 @@ trait CommonRunner { try { run(urls, objectName, arguments) ; None } catch { case NonFatal(e) => Some(rootCause(e)) } - private def rootCause(x: Throwable): Throwable = x match { + private def rootCause(x: Throwable): Throwable = x match case _: InvocationTargetException | _: ExceptionInInitializerError | _: UndeclaredThrowableException | @@ -38,8 +37,6 @@ trait CommonRunner { if x.getCause != null => rootCause(x.getCause.nn) case _ => x - } -} /** An object that runs another object specified by name. * diff --git a/compiler/src/dotty/tools/runner/ScalaClassLoader.scala b/compiler/src/dotty/tools/runner/ScalaClassLoader.scala index 9ec0199abcbb..1cc33c06445a 100644 --- a/compiler/src/dotty/tools/runner/ScalaClassLoader.scala +++ b/compiler/src/dotty/tools/runner/ScalaClassLoader.scala @@ -13,7 +13,7 @@ import scala.annotation.internal.sharable import scala.annotation.tailrec import scala.util.control.Exception.catching -final class RichClassLoader(private val self: ClassLoader) extends AnyVal { +final class RichClassLoader(private val self: ClassLoader) extends AnyVal: /** Execute an action with this classloader as context classloader. 
*/ private def asContext[T](action: => T): T = ScalaClassLoader.asContext(self)(action) @@ -28,16 +28,15 @@ final class RichClassLoader(private val self: ClassLoader) extends AnyVal { Class.forName(path, initialize, self).asInstanceOf[Class[T]] /** Run the main method of a class to be loaded by this classloader */ - def run(objectName: String, arguments: Seq[String]): Unit = { + def run(objectName: String, arguments: Seq[String]): Unit = val clsToRun = tryToInitializeClass(objectName).getOrElse(throw new ClassNotFoundException(objectName)) val method = clsToRun.getMethod("main", classOf[Array[String]]) if !Modifier.isStatic(method.getModifiers) then throw new NoSuchMethodException(s"$objectName.main is not static") try asContext(method.invoke(null, Array(arguments.toArray: AnyRef): _*)) catch unwrapHandler({ case ex => throw ex }) - } - @tailrec private def unwrapThrowable(x: Throwable): Throwable = x match { + @tailrec private def unwrapThrowable(x: Throwable): Throwable = x match case _: InvocationTargetException | // thrown by reflectively invoked method or constructor _: ExceptionInInitializerError | // thrown when running a static initializer (e.g. a scala module constructor) _: UndeclaredThrowableException | // invocation on a proxy instance if its invocation handler's `invoke` throws an exception @@ -46,19 +45,16 @@ final class RichClassLoader(private val self: ClassLoader) extends AnyVal { if x.getCause != null => unwrapThrowable(x.getCause) case _ => x - } // Transforms an exception handler into one which will only receive the unwrapped // exceptions (for the values of wrap covered in unwrapThrowable.) 
private def unwrapHandler[T](pf: PartialFunction[Throwable, T]): PartialFunction[Throwable, T] = pf.compose({ case ex => unwrapThrowable(ex) }) -} -object RichClassLoader { +object RichClassLoader: implicit def wrapClassLoader(loader: ClassLoader): RichClassLoader = new RichClassLoader(loader) -} -object ScalaClassLoader { +object ScalaClassLoader: def setContext(cl: ClassLoader) = Thread.currentThread.setContextClassLoader(cl) def fromURLsParallelCapable(urls: Seq[URL], parent: ClassLoader | Null = null): URLClassLoader = @@ -79,4 +75,3 @@ object ScalaClassLoader { setContext(classLoader) action finally setContext(saved) -} diff --git a/compiler/src/dotty/tools/scripting/Main.scala b/compiler/src/dotty/tools/scripting/Main.scala index 8db12f400c64..c04a86a467bc 100755 --- a/compiler/src/dotty/tools/scripting/Main.scala +++ b/compiler/src/dotty/tools/scripting/Main.scala @@ -20,7 +20,7 @@ object Main: val scriptArgs = rest.drop(2) var saveJar = false var invokeFlag = true // by default, script main method is invoked - val compilerArgs = leftArgs.filter { + val compilerArgs = leftArgs.filter: case "-save" | "-savecompiled" => saveJar = true false @@ -29,7 +29,6 @@ object Main: false case _ => true - } (compilerArgs, file, scriptArgs, saveJar, invokeFlag) end distinguishArgs @@ -54,10 +53,9 @@ object Main: private def writeJarfile(outDir: Path, scriptFile: File, scriptArgs:Array[String], classpathEntries:Seq[Path], mainClassName: String): Unit = - val jarTargetDir: Path = Option(scriptFile.toPath.toAbsolutePath.getParent) match { + val jarTargetDir: Path = Option(scriptFile.toPath.toAbsolutePath.getParent) match case None => sys.error(s"no parent directory for script file [$scriptFile]") case Some(parent) => parent - } def scriptBasename = scriptFile.getName.takeWhile(_!='.') val jarPath = s"$jarTargetDir/$scriptBasename.jar" @@ -82,14 +80,13 @@ object Main: def pathsep = sys.props("path.separator") - extension(path: String) { + extension(path: String) // Normalize 
path separator, convert relative path to absolute def norm: String = - path.replace('\\', '/') match { + path.replace('\\', '/') match case s if s.secondChar == ":" => s case s if s.startsWith("./") => s.drop(2) case s => s - } // convert to absolute path relative to cwd. def absPath: String = norm match @@ -102,6 +99,5 @@ object Main: // Windows java.io.File#isAbsolute treats them as relative. def isAbsolute = path.norm.startsWith("/") || (isWin && path.secondChar == ":") def secondChar: String = path.take(2).drop(1).mkString("") - } lazy val userDir = sys.props("user.dir").norm diff --git a/compiler/src/scala/quoted/runtime/impl/ExprImpl.scala b/compiler/src/scala/quoted/runtime/impl/ExprImpl.scala index 5fac91124187..56277a5c9ec0 100644 --- a/compiler/src/scala/quoted/runtime/impl/ExprImpl.scala +++ b/compiler/src/scala/quoted/runtime/impl/ExprImpl.scala @@ -10,14 +10,12 @@ import dotty.tools.dotc.ast.tpd * * May contain references to code defined outside this Expr instance. */ -final class ExprImpl(val tree: tpd.Tree, val scope: Scope) extends Expr[Any] { - override def equals(that: Any): Boolean = that match { +final class ExprImpl(val tree: tpd.Tree, val scope: Scope) extends Expr[Any]: + override def equals(that: Any): Boolean = that match case that: ExprImpl => // Expr are wrappers around trees, therefore they are equals if their trees are equal. // All scope should be equal unless two different runs of the compiler created the trees. 
tree == that.tree && scope == that.scope case _ => false - } override def hashCode(): Int = tree.hashCode() -} diff --git a/compiler/src/scala/quoted/runtime/impl/QuoteMatcher.scala b/compiler/src/scala/quoted/runtime/impl/QuoteMatcher.scala index 6477c8da1578..e13c4a5e654c 100644 --- a/compiler/src/scala/quoted/runtime/impl/QuoteMatcher.scala +++ b/compiler/src/scala/quoted/runtime/impl/QuoteMatcher.scala @@ -102,7 +102,7 @@ import dotty.tools.dotc.util.optional * * ``` */ -object QuoteMatcher { +object QuoteMatcher: import tpd.* // TODO use flag from Context. Maybe -debug or add -debug-macros @@ -127,9 +127,9 @@ object QuoteMatcher { /** Evaluate the result of pattern matching against a quote pattern. * Implementation of the runtime of `QuoteMatching.{ExprMatch, TypeMatch}.unapply`. */ - def treeMatch(scrutinee: Tree, pattern: Tree)(using Context): Option[Tuple] = { + def treeMatch(scrutinee: Tree, pattern: Tree)(using Context): Option[Tuple] = val (pat1, typeHoles, ctx1) = instrumentTypeHoles(pattern) - inContext(ctx1) { + inContext(ctx1): optional { given Env = Map.empty scrutinee =?= pat1 @@ -143,19 +143,16 @@ object QuoteMatcher { val matchedExprs = val typeHoleMap: Type => Type = if typeHoles.isEmpty then identity - else new TypeMap { + else new TypeMap: private val typeHoleMapping = Map(typeHoles.zip(typeHoleApproximations)*) def apply(tp: Type): Type = tp match case TypeRef(NoPrefix, _) => typeHoleMapping.getOrElse(tp.typeSymbol, tp) case _ => mapOver(tp) - } if matchings.isEmpty then Nil else matchings.map(_.toExpr(typeHoleMap, spliceScope)) val results = matchedTypes ++ matchedExprs Tuple.fromIArray(IArray.unsafeFromArray(results.toArray)) } - } - } def instrumentTypeHoles(pat: Tree)(using Context): (Tree, List[Symbol], Context) = def isTypeHoleDef(tree: Tree): Boolean = tree match @@ -181,11 +178,10 @@ object QuoteMatcher { if fromAboveAnnot then fullBounds.nn.hi else fullBounds.nn.lo /** Check that all trees match with `mtch` and concatenate the 
results with &&& */ - private def matchLists[T](l1: List[T], l2: List[T])(mtch: (T, T) => MatchingExprs): optional[MatchingExprs] = (l1, l2) match { + private def matchLists[T](l1: List[T], l2: List[T])(mtch: (T, T) => MatchingExprs): optional[MatchingExprs] = (l1, l2) match case (x :: xs, y :: ys) => mtch(x, y) &&& matchLists(xs, ys)(mtch) case (Nil, Nil) => matched case _ => notMatched - } extension (scrutinees: List[Tree]) private def =?= (patterns: List[Tree])(using Env, Context): optional[MatchingExprs] = @@ -206,7 +202,7 @@ object QuoteMatcher { /* Match block flattening */ // TODO move to cases /** Normalize the tree */ - def normalize(tree: Tree): Tree = tree match { + def normalize(tree: Tree): Tree = tree match case Block(Nil, expr) => normalize(expr) case Block(stats1, Block(stats2, expr)) => expr match @@ -214,17 +210,15 @@ object QuoteMatcher { case _ => normalize(Block(stats1 ::: stats2, expr)) case Inlined(_, Nil, expr) => normalize(expr) case _ => tree - } val scrutinee = normalize(scrutinee0) val pattern = normalize(pattern0) /** Check that both are `val` or both are `lazy val` or both are `var` **/ - def checkValFlags(): Boolean = { + def checkValFlags(): Boolean = val sFlags = scrutinee.symbol.flags val pFlags = pattern.symbol.flags sFlags.is(Lazy) == pFlags.is(Lazy) && sFlags.is(Mutable) == pFlags.is(Mutable) - } // TODO remove object TypeTreeTypeTest: @@ -262,11 +256,10 @@ object QuoteMatcher { val env = summon[Env] val capturedArgs = args.map(_.symbol) val captureEnv = env.filter((k, v) => !capturedArgs.contains(v)) - withEnv(captureEnv) { + withEnv(captureEnv): scrutinee match case ClosedPatternTerm(scrutinee) => matchedOpen(scrutinee, pattern.tpe, args, env) case _ => notMatched - } /* Match type ascription (b) */ case Typed(expr2, _) => @@ -321,15 +314,13 @@ object QuoteMatcher { case Block(stat1 :: stats1, expr1) => pattern match case Block(stat2 :: stats2, expr2) => - val newEnv = (stat1, stat2) match { + val newEnv = (stat1, stat2) match 
case (stat1: MemberDef, stat2: MemberDef) => summon[Env] + (stat1.symbol -> stat2.symbol) case _ => summon[Env] - } - withEnv(newEnv) { + withEnv(newEnv): stat1 =?= stat2 &&& Block(stats1, expr1) =?= Block(stats2, expr2) - } case _ => notMatched /* Match if */ @@ -472,21 +463,19 @@ object QuoteMatcher { || summon[Env].get(devirtualizedScrutinee).contains(pattern) || devirtualizedScrutinee.allOverriddenSymbols.contains(pattern) - private object ClosedPatternTerm { + private object ClosedPatternTerm: /** Matches a term that does not contain free variables defined in the pattern (i.e. not defined in `Env`) */ def unapply(term: Tree)(using Env, Context): Option[term.type] = if freePatternVars(term).isEmpty then Some(term) else None /** Return all free variables of the term defined in the pattern (i.e. defined in `Env`) */ def freePatternVars(term: Tree)(using Env, Context): Set[Symbol] = - val accumulator = new TreeAccumulator[Set[Symbol]] { + val accumulator = new TreeAccumulator[Set[Symbol]]: def apply(x: Set[Symbol], tree: Tree)(using Context): Set[Symbol] = tree match case tree: Ident if summon[Env].contains(tree.symbol) => foldOver(x + tree.symbol, tree) case _ => foldOver(x, tree) - } accumulator.apply(Set.empty, term) - } enum MatchResult: /** Closed pattern extracted value @@ -514,14 +503,13 @@ object QuoteMatcher { case MatchResult.ClosedTree(tree) => new ExprImpl(tree, spliceScope) case MatchResult.OpenTree(tree, patternTpe, args, env) => - val names: List[TermName] = args.map { + val names: List[TermName] = args.map: case Block(List(DefDef(nme.ANON_FUN, _, _, Apply(Ident(name), _))), _) => name.asTermName case arg => arg.symbol.name.asTermName - } val paramTypes = args.map(x => mapTypeHoles(x.tpe.widenTermRefExpr)) val methTpe = MethodType(names)(_ => paramTypes, _ => mapTypeHoles(patternTpe)) val meth = newAnonFun(ctx.owner, methTpe) - def bodyFn(lambdaArgss: List[List[Tree]]): Tree = { + def bodyFn(lambdaArgss: List[List[Tree]]): Tree = val argsMap = 
args.view.map(_.symbol).zip(lambdaArgss.head).toMap val body = new TreeMap { override def transform(tree: Tree)(using Context): Tree = @@ -530,7 +518,6 @@ object QuoteMatcher { case tree => super.transform(tree) }.transform(tree) TreeOps(body).changeNonLocalOwners(meth) - } val hoasClosure = Closure(meth, bodyFn) new ExprImpl(hoasClosure, spliceScope) @@ -551,4 +538,3 @@ object QuoteMatcher { def &&& (that: MatchingExprs): MatchingExprs = self ++ that end extension -} diff --git a/compiler/src/scala/quoted/runtime/impl/QuotesImpl.scala b/compiler/src/scala/quoted/runtime/impl/QuotesImpl.scala index da4aba39cfbe..b2b066735084 100644 --- a/compiler/src/scala/quoted/runtime/impl/QuotesImpl.scala +++ b/compiler/src/scala/quoted/runtime/impl/QuotesImpl.scala @@ -23,7 +23,7 @@ import scala.quoted.runtime.impl.printers._ import scala.reflect.TypeTest -object QuotesImpl { +object QuotesImpl: def apply()(using Context): Quotes = new QuotesImpl @@ -34,7 +34,6 @@ object QuotesImpl { if ctx.settings.color.value == "always" then TreeAnsiCode.show(tree) else TreeCode.show(tree) -} class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler, QuoteMatching: @@ -63,7 +62,7 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler reflect.TypeReprMethods.<:<(reflect.asTerm(self).tpe)(reflect.TypeRepr.of[X]) /** Convert this to an `quoted.Expr[X]` if this expression is a valid expression of type `X` or throws */ - def asExprOf[X](using scala.quoted.Type[X]): scala.quoted.Expr[X] = { + def asExprOf[X](using scala.quoted.Type[X]): scala.quoted.Expr[X] = if self.isExprOf[X] then self.asInstanceOf[scala.quoted.Expr[X]] else @@ -73,7 +72,6 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler |did not conform to type: ${reflect.Printer.TypeReprCode.show(reflect.TypeRepr.of[X])} |""".stripMargin ) - } end extension object reflect extends reflectModule: @@ -235,21 +233,19 @@ class QuotesImpl private (using val ctx: 
Context) extends Quotes, QuoteUnpickler val ctr = ctx.typeAssigner.assignType(untpdCtr, cls.primaryConstructor) tpd.ClassDefWithParents(cls.asClass, ctr, parents, body) - def copy(original: Tree)(name: String, constr: DefDef, parents: List[Tree], selfOpt: Option[ValDef], body: List[Statement]): ClassDef = { + def copy(original: Tree)(name: String, constr: DefDef, parents: List[Tree], selfOpt: Option[ValDef], body: List[Statement]): ClassDef = val dotc.ast.Trees.TypeDef(_, originalImpl: tpd.Template) = original: @unchecked tpd.cpy.TypeDef(original)(name.toTypeName, tpd.cpy.Template(originalImpl)(constr, parents, derived = Nil, selfOpt.getOrElse(tpd.EmptyValDef), body)) - } def unapply(cdef: ClassDef): (String, DefDef, List[Tree /* Term | TypeTree */], Option[ValDef], List[Statement]) = val rhs = cdef.rhs.asInstanceOf[tpd.Template] (cdef.name.toString, cdef.constructor, cdef.parents, cdef.self, rhs.body) - def module(module: Symbol, parents: List[Tree /* Term | TypeTree */], body: List[Statement]): (ValDef, ClassDef) = { + def module(module: Symbol, parents: List[Tree /* Term | TypeTree */], body: List[Statement]): (ValDef, ClassDef) = val cls = module.moduleClass val clsDef = ClassDef(cls, parents, body) val newCls = Apply(Select(New(TypeIdent(cls)), cls.primaryConstructor), Nil) val modVal = ValDef(module, Some(newCls)) (modVal, clsDef) - } end ClassDef given ClassDefMethods: ClassDefMethods with @@ -396,17 +392,15 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler def tpe: TypeRepr = self.tpe.widenSkolem def underlyingArgument: Term = new tpd.TreeOps(self).underlyingArgument def underlying: Term = new tpd.TreeOps(self).underlying - def etaExpand(owner: Symbol): Term = self.tpe.widen match { + def etaExpand(owner: Symbol): Term = self.tpe.widen match case mtpe: Types.MethodType if !mtpe.isParamDependent => - val closureResType = mtpe.resType match { + val closureResType = mtpe.resType match case t: Types.MethodType => 
t.toFunctionType(isJava = self.symbol.is(dotc.core.Flags.JavaDefined)) case t => t - } val closureTpe = Types.MethodType(mtpe.paramNames, mtpe.paramInfos, closureResType) val closureMethod = dotc.core.Symbols.newAnonFun(owner, closureTpe) tpd.Closure(closureMethod, tss => new tpd.TreeOps(self).appliedToTermArgs(tss.head).etaExpand(closureMethod)) case _ => self - } def appliedTo(arg: Term): Term = self.appliedToArgs(arg :: Nil) @@ -831,12 +825,11 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler val meth = dotc.core.Symbols.newAnonFun(owner, tpe) withDefaultPos(tpd.Closure(meth, tss => xCheckedMacroOwners(xCheckMacroValidExpr(rhsFn(meth, tss.head.map(withDefaultPos))), meth))) - def unapply(tree: Block): Option[(List[ValDef], Term)] = tree match { + def unapply(tree: Block): Option[(List[ValDef], Term)] = tree match case Block((ddef @ DefDef(_, tpd.ValDefs(params) :: Nil, _, Some(body))) :: Nil, Closure(meth, _)) if ddef.symbol == meth.symbol => Some((params, body)) case _ => None - } end Lambda type If = tpd.If @@ -1675,13 +1668,12 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler type OmitSelector = untpd.ImportSelector object OmitSelectorTypeTest extends TypeTest[Selector, OmitSelector]: - def unapply(x: Selector): Option[OmitSelector & x.type] = x match { + def unapply(x: Selector): Option[OmitSelector & x.type] = x match case self: (untpd.ImportSelector & x.type) => self.renamed match case dotc.ast.Trees.Ident(nme.WILDCARD) => Some(self) case _ => None case _ => None - } end OmitSelectorTypeTest object OmitSelector extends OmitSelectorModule: @@ -1698,10 +1690,9 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler type GivenSelector = untpd.ImportSelector object GivenSelectorTypeTest extends TypeTest[Selector, GivenSelector]: - def unapply(x: Selector): Option[GivenSelector & x.type] = x match { + def unapply(x: Selector): Option[GivenSelector & x.type] = x match 
case self: (untpd.ImportSelector & x.type) if x.isGiven => Some(self) case _ => None - } end GivenSelectorTypeTest object GivenSelector extends GivenSelectorModule: @@ -2592,11 +2583,10 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler def docstring: Option[String] = import dotc.core.Comments.CommentsContext - val docCtx = ctx.docCtx.getOrElse { + val docCtx = ctx.docCtx.getOrElse: throw new RuntimeException( "DocCtx could not be found and documentations are unavailable. This is a compiler-internal error." ) - } docCtx.docstring(self).map(_.raw) def tree: Tree = FromSymbol.definitionFromSym(self) @@ -2608,10 +2598,9 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler self.denot.getAnnotation(annotSym).map(_.tree) def annotations: List[Term] = - self.denot.annotations.flatMap { + self.denot.annotations.flatMap: case _: dotc.core.Annotations.BodyAnnotation => Nil case annot => annot.tree :: Nil - } def isDefinedInCurrentRun: Boolean = self.exists && self.topLevelClass.asClass.isDefinedInCurrentRun @@ -2715,9 +2704,8 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler def caseFields: List[Symbol] = if !self.isClass then Nil - else self.asClass.paramAccessors.collect { + else self.asClass.paramAccessors.collect: case sym if sym.is(dotc.core.Flags.CaseAccessor) => sym.asTerm - } def isTypeParam: Boolean = self.isTypeParam def signature: Signature = self.signature @@ -2756,12 +2744,11 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler given SignatureMethods: SignatureMethods with extension (self: Signature) def paramSigs: List[String | Int] = - self.paramsSig.map { + self.paramsSig.map: case paramSig: dotc.core.Names.TypeName => paramSig.toString case paramSig: Int => paramSig - } def resultSig: String = self.resSig.toString end extension @@ -2921,14 +2908,13 @@ class QuotesImpl private (using val ctx: Context) extends Quotes, QuoteUnpickler 
type SourceFile = dotc.util.SourceFile - object SourceFile extends SourceFileModule { + object SourceFile extends SourceFileModule: def current: SourceFile = val unit = ctx.compilationUnit if unit eq NoCompilationUnit then throw new java.lang.UnsupportedOperationException( "`reflect.SourceFile.current` cannot be called within the TASTy ispector") else unit.source - } given SourceFileMethods: SourceFileMethods with extension (self: SourceFile) diff --git a/compiler/src/scala/quoted/runtime/impl/ScopeException.scala b/compiler/src/scala/quoted/runtime/impl/ScopeException.scala index d65328bb5405..2869781c6ccc 100644 --- a/compiler/src/scala/quoted/runtime/impl/ScopeException.scala +++ b/compiler/src/scala/quoted/runtime/impl/ScopeException.scala @@ -21,7 +21,7 @@ object ScopeException: |Splice stack: |${scope.stack.mkString("\t", "\n\t", "\n")} """.stripMargin - else + else s"""Expression created in a splice was used outside of that splice. |Created in: $scope |Used in: $currentScope diff --git a/compiler/src/scala/quoted/runtime/impl/SpliceScope.scala b/compiler/src/scala/quoted/runtime/impl/SpliceScope.scala index 797b38be2743..56b7289bb971 100644 --- a/compiler/src/scala/quoted/runtime/impl/SpliceScope.scala +++ b/compiler/src/scala/quoted/runtime/impl/SpliceScope.scala @@ -10,7 +10,7 @@ import dotty.tools.dotc.util.SourcePosition * A nested splice gets a new scope with the enclosing scope as its `outer`. * This also applies for recursive splices. */ -trait Scope { +trait Scope: /** Outer scope that was used to create the quote containing this splice. * NoScope otherwise. */ @@ -26,7 +26,6 @@ trait Scope { this.toString :: (if outer.eq(NoScope) then Nil else outer.stack) /** If the two scopes correspond to the same splice in source. 
*/ def atSameLocation(scope: Scope): Boolean = false -} /** Only used for outer scope of top level splice and staging `run` */ object NoScope extends Scope: diff --git a/compiler/src/scala/quoted/runtime/impl/TypeImpl.scala b/compiler/src/scala/quoted/runtime/impl/TypeImpl.scala index d4cea83efde8..b84040a1e015 100644 --- a/compiler/src/scala/quoted/runtime/impl/TypeImpl.scala +++ b/compiler/src/scala/quoted/runtime/impl/TypeImpl.scala @@ -4,14 +4,12 @@ package runtime.impl import dotty.tools.dotc.ast.tpd /** Quoted type (or kind) `T` backed by a tree */ -final class TypeImpl(val typeTree: tpd.Tree, val scope: Scope) extends Type[?] { - override def equals(that: Any): Boolean = that match { +final class TypeImpl(val typeTree: tpd.Tree, val scope: Scope) extends Type[?]: + override def equals(that: Any): Boolean = that match case that: TypeImpl => typeTree == // TastyTreeExpr are wrappers around trees, therefore they are equals if their trees are equal. // All scope should be equal unless two different runs of the compiler created the trees. 
that.typeTree && scope == that.scope case _ => false - } override def hashCode(): Int = typeTree.hashCode() -} diff --git a/compiler/src/scala/quoted/runtime/impl/printers/Extractors.scala b/compiler/src/scala/quoted/runtime/impl/printers/Extractors.scala index c229338ad228..6e96bc3b7b23 100644 --- a/compiler/src/scala/quoted/runtime/impl/printers/Extractors.scala +++ b/compiler/src/scala/quoted/runtime/impl/printers/Extractors.scala @@ -3,7 +3,7 @@ package runtime.impl.printers import scala.quoted._ -object Extractors { +object Extractors: def showTree(using Quotes)(tree: quotes.reflect.Tree): String = new ExtractorsPrinter[quotes.type]().visitTree(tree).result() @@ -17,7 +17,7 @@ object Extractors { def showSymbol(using Quotes)(symbol: quotes.reflect.Symbol): String = new ExtractorsPrinter[quotes.type]().visitSymbol(symbol).result() - def showFlags(using Quotes)(flags: quotes.reflect.Flags): String = { + def showFlags(using Quotes)(flags: quotes.reflect.Flags): String = import quotes.reflect._ val flagList = List.newBuilder[String] if (flags.is(Flags.Abstract)) flagList += "Flags.Abstract" @@ -61,7 +61,6 @@ object Extractors { if (flags.is(Flags.Trait)) flagList += "Flags.Trait" if (flags.is(Flags.Transparent)) flagList += "Flags.Transparent" flagList.result().mkString(" | ") - } private class ExtractorsPrinter[Q <: Quotes & Singleton](using val quotes: Q) { self => import quotes.reflect._ @@ -70,7 +69,7 @@ object Extractors { def result(): String = sb.result() - def visitTree(x: Tree): this.type = x match { + def visitTree(x: Tree): this.type = x match case tree: Ref => tree match case Wildcard() => @@ -177,9 +176,8 @@ object Extractors { this += "Alternatives(" ++= patterns += ")" case TypedOrTest(tree, tpt) => this += "TypedOrTest(" += tree += ", " += tpt += ")" - } - def visitConstant(x: Constant): this.type = x match { + def visitConstant(x: Constant): this.type = x match case UnitConstant() => this += "UnitConstant()" case NullConstant() => this += 
"NullConstant()" case BooleanConstant(value) => this += "BooleanConstant(" += value += ")" @@ -194,9 +192,8 @@ object Extractors { case ClassOfConstant(value) => this += "ClassOfConstant(" visitType(value) += ")" - } - def visitType(x: TypeRepr): this.type = x match { + def visitType(x: TypeRepr): this.type = x match case ConstantType(value) => this += "ConstantType(" += value += ")" case TermRef(qual, name) => @@ -239,19 +236,16 @@ object Extractors { this += "NoPrefix()" case MatchCase(pat, rhs) => this += "MatchCase(" += pat += ", " += rhs += ")" - } - def visitSignature(sig: Signature): this.type = { + def visitSignature(sig: Signature): this.type = val Signature(params, res) = sig this += "Signature(" ++= params.map(_.toString) += ", " += res += ")" - } - def visitSelector(sel: Selector): this.type = sel match { + def visitSelector(sel: Selector): this.type = sel match case SimpleSelector(id) => this += "SimpleSelector(" += id += ")" case RenameSelector(id1, id2) => this += "RenameSelector(" += id1 += ", " += id2 += ")" case OmitSelector(id) => this += "OmitSelector(" += id += ")" case GivenSelector(bound) => this += "GivenSelector(" += bound += ")" - } def visitSymbol(x: Symbol): this.type = if x.isPackageDef then this += "IsPackageDefSymbol(<" += x.fullName += ">)" @@ -278,68 +272,56 @@ object Extractors { def ++=(xs: List[String]): this.type = visitList[String](xs, +=) - private implicit class StringOps(buff: self.type) { + private implicit class StringOps(buff: self.type): def +=(x: Option[String]): self.type = { visitOption(x, y => buff += "\"" += y += "\""); buff } - } - private implicit class TreeOps(buff: self.type) { + private implicit class TreeOps(buff: self.type): def +=(x: Tree): self.type = { visitTree(x); buff } def +=(x: Option[Tree]): self.type = { visitOption(x, visitTree); buff } def ++=(x: List[Tree]): self.type = { visitList(x, visitTree); buff } def +++=(x: List[List[Tree]]): self.type = { visitList(x, ++=); buff } - } - private implicit 
class ConstantOps(buff: self.type) { + private implicit class ConstantOps(buff: self.type): def +=(x: Constant): self.type = { visitConstant(x); buff } - } - private implicit class TypeOps(buff: self.type) { + private implicit class TypeOps(buff: self.type): def +=(x: TypeRepr): self.type = { visitType(x); buff } def +=(x: Option[TypeRepr]): self.type = { visitOption(x, visitType); buff } def ++=(x: List[TypeRepr]): self.type = { visitList(x, visitType); buff } - } - private implicit class SignatureOps(buff: self.type) { + private implicit class SignatureOps(buff: self.type): def +=(x: Option[Signature]): self.type = { visitOption(x, visitSignature); buff } - } - private implicit class SelectorOps(buff: self.type) { + private implicit class SelectorOps(buff: self.type): def ++=(x: List[Selector]): self.type = { visitList(x, visitSelector); buff } - } - private implicit class SymbolOps(buff: self.type) { + private implicit class SymbolOps(buff: self.type): def +=(x: Symbol): self.type = { visitSymbol(x); buff } - } - private implicit class ParamClauseOps(buff: self.type) { + private implicit class ParamClauseOps(buff: self.type): def ++=(x: List[ParamClause]): self.type = { visitList(x, visitParamClause); buff } - } - private def visitOption[U](opt: Option[U], visit: U => this.type): this.type = opt match { + private def visitOption[U](opt: Option[U], visit: U => this.type): this.type = opt match case Some(x) => this += "Some(" visit(x) this += ")" case _ => this += "None" - } - private def visitList[U](list: List[U], visit: U => this.type): this.type = list match { + private def visitList[U](list: List[U], visit: U => this.type): this.type = list match case x0 :: xs => this += "List(" visit(x0) - def visitNext(xs: List[U]): Unit = xs match { + def visitNext(xs: List[U]): Unit = xs match case y :: ys => this += ", " visit(y) visitNext(ys) case Nil => - } visitNext(xs) this += ")" case Nil => this += "Nil" - } } -} diff --git 
a/compiler/src/scala/quoted/runtime/impl/printers/SourceCode.scala b/compiler/src/scala/quoted/runtime/impl/printers/SourceCode.scala index a6a773adc9ba..b507e3cb2600 100644 --- a/compiler/src/scala/quoted/runtime/impl/printers/SourceCode.scala +++ b/compiler/src/scala/quoted/runtime/impl/printers/SourceCode.scala @@ -4,7 +4,7 @@ package runtime.impl.printers import scala.annotation.switch /** Printer for fully elaborated representation of the source code */ -object SourceCode { +object SourceCode: def showTree(using Quotes)(tree: quotes.reflect.Tree)(syntaxHighlight: SyntaxHighlight, fullNames: Boolean): String = new SourceCodePrinter[quotes.type](syntaxHighlight, fullNames).printTree(tree).result() @@ -18,7 +18,7 @@ object SourceCode { def showSymbol(using Quotes)(symbol: quotes.reflect.Symbol)(syntaxHighlight: SyntaxHighlight): String = symbol.fullName - def showFlags(using Quotes)(flags: quotes.reflect.Flags)(syntaxHighlight: SyntaxHighlight): String = { + def showFlags(using Quotes)(flags: quotes.reflect.Flags)(syntaxHighlight: SyntaxHighlight): String = import quotes.reflect._ val flagList = List.newBuilder[String] if (flags.is(Flags.Abstract)) flagList += "abstract" @@ -61,48 +61,42 @@ object SourceCode { if (flags.is(Flags.Trait)) flagList += "trait" if (flags.is(Flags.Transparent)) flagList += "transparent" flagList.result().mkString("/*", " ", "*/") - } - private class SourceCodePrinter[Q <: Quotes & Singleton](syntaxHighlight: SyntaxHighlight, fullNames: Boolean)(using val quotes: Q) { + private class SourceCodePrinter[Q <: Quotes & Singleton](syntaxHighlight: SyntaxHighlight, fullNames: Boolean)(using val quotes: Q): import syntaxHighlight._ import quotes.reflect._ private[this] val sb: StringBuilder = new StringBuilder private[this] var indent: Int = 0 - private def indented(printIndented: => Unit): Unit = { + private def indented(printIndented: => Unit): Unit = indent += 1 printIndented indent -= 1 - } - private def inParens(body: => Unit): this.type 
= { + private def inParens(body: => Unit): this.type = this += "(" body this += ")" - } - private def inSquare(body: => Unit): this.type = { + private def inSquare(body: => Unit): this.type = this += "[" body this += "]" - } - private def inBlock(body: => Unit): this.type = { + private def inBlock(body: => Unit): this.type = this += " {" - indented { + indented: this += lineBreak() body - } this += lineBreak() += "}" - } def result(): String = sb.result() private def lineBreak(): String = "\n" + (" " * indent) private def doubleLineBreak(): String = "\n\n" + (" " * indent) - def printTree(tree: Tree)(using elideThis: Option[Symbol] = None): this.type = tree match { + def printTree(tree: Tree)(using elideThis: Option[Symbol] = None): this.type = tree match case PackageObject(body)=> printTree(body) // Print package object @@ -111,19 +105,17 @@ object SourceCode { printTree(inner) case tree @ PackageClause(name, stats) => - val stats1 = stats.collect { + val stats1 = stats.collect: case stat: PackageClause => stat case stat: Definition if !(stat.symbol.flags.is(Flags.Module) && stat.symbol.flags.is(Flags.Lazy)) => stat case stat @ (_:Import | _:Export) => stat - } - name match { + name match case Ident("") => printTrees(stats1, lineBreak()) case _ => this += "package " printType(name.tpe) inBlock(printTrees(stats1, lineBreak())) - } case Import(expr, selectors) => this += "import " @@ -146,33 +138,30 @@ object SourceCode { if (flags.is(Flags.Final) && !flags.is(Flags.Module)) this += highlightKeyword("final ") if (flags.is(Flags.Case)) this += highlightKeyword("case ") - if (name == "package$") { + if (name == "package$") this += highlightKeyword("package object ") += highlightTypeDef(cdef.symbol.owner.name.stripSuffix("$")) - } else if (flags.is(Flags.Module)) this += highlightKeyword("object ") += highlightTypeDef(name.stripSuffix("$")) else if (flags.is(Flags.Trait)) this += highlightKeyword("trait ") += highlightTypeDef(name) else if (flags.is(Flags.Abstract)) 
this += highlightKeyword("abstract class ") += highlightTypeDef(name) else this += highlightKeyword("class ") += highlightTypeDef(name) - if (!flags.is(Flags.Module)) { + if (!flags.is(Flags.Module)) for paramClause <- paramss do paramClause match case TermParamClause(params) => printArgsDefs(params) case TypeParamClause(params) => printTargsDefs(stats.collect { case targ: TypeDef => targ }.filter(_.symbol.isTypeParam).zip(params)) - } - val parents1 = parents.filter { + val parents1 = parents.filter: case Apply(Select(New(tpt), _), _) => tpt.tpe.typeSymbol != Symbol.requiredClass("java.lang.Object") case TypeSelect(Select(Ident("_root_"), "scala"), "Product") => false case TypeSelect(Select(Ident("_root_"), "scala"), "Serializable") => false case _ => true - } if (parents1.nonEmpty) this += highlightKeyword(" extends ") - def printParent(parent: Tree /* Term | TypeTree */, needEmptyParens: Boolean = false): Unit = parent match { + def printParent(parent: Tree /* Term | TypeTree */, needEmptyParens: Boolean = false): Unit = parent match case parent: TypeTree => printTypeTree(parent)(using Some(cdef.symbol)) case TypeApply(fun, targs) => @@ -189,21 +178,19 @@ object SourceCode { printType(newTree.tpe)(using Some(cdef.symbol)) case parent: Term => throw new MatchError(parent.show(using Printer.TreeStructure)) - } - def printSeparated(list: List[Tree /* Term | TypeTree */]): Unit = list match { + def printSeparated(list: List[Tree /* Term | TypeTree */]): Unit = list match case Nil => case x :: Nil => printParent(x) case x :: xs => printParent(x) this += highlightKeyword(" with ") printSeparated(xs) - } printSeparated(parents1) - def keepDefinition(d: Definition): Boolean = { + def keepDefinition(d: Definition): Boolean = val flags = d.symbol.flags - def isUndecompilableCaseClassMethod: Boolean = { + def isUndecompilableCaseClassMethod: Boolean = // Currently the compiler does not allow overriding some of the methods generated for case classes 
d.symbol.flags.is(Flags.Synthetic) && (d match { @@ -215,34 +202,27 @@ object SourceCode { n == "productElementName" case _ => false }) - } def isInnerModuleObject = d.symbol.flags.is(Flags.Lazy) && d.symbol.flags.is(Flags.Module) !flags.is(Flags.Param) && !flags.is(Flags.ParamAccessor) && !flags.is(Flags.FieldAccessor) && !isUndecompilableCaseClassMethod && !isInnerModuleObject - } - val stats1 = stats.collect { + val stats1 = stats.collect: case stat: Definition if keepDefinition(stat) => stat case stat @ (_:Import | _:Export) => stat case stat: Term => stat - } - def printBody(printSelf: Boolean) = { + def printBody(printSelf: Boolean) = this += " {" - indented { - if (printSelf) { + indented: + if (printSelf) val Some(ValDef(name, tpt, _)) = self: @unchecked - indented { + indented: val name1 = if (name == "_") "this" else name this += " " += highlightValDef(name1) += ": " printTypeTree(tpt)(using Some(cdef.symbol)) this += " =>" - } - } this += lineBreak() printTrees(stats1, lineBreak()) - } this += lineBreak() += "}" - } - self match { + self match case Some(ValDef(_, Singleton(_), _)) => if (stats1.nonEmpty) printBody(printSelf = false) @@ -251,7 +231,6 @@ object SourceCode { case _ => if (stats1.nonEmpty) printBody(printSelf = false) - } this case tdef @ TypeDef(name, rhs) => @@ -276,16 +255,15 @@ object SourceCode { val name1 = splicedName(vdef.symbol).getOrElse(name) this += highlightValDef(name1) += ": " printTypeTree(tpt) - rhs match { + rhs match case Some(tree) => this += " = " printTree(tree) case None => this - } case While(cond, body) => - (cond, body) match { + (cond, body) match case (Block(Block(Nil, body1) :: Nil, Block(Nil, cond1)), Literal(UnitConstant())) => this += highlightKeyword("do ") printTree(body1) += highlightKeyword(" while ") @@ -294,7 +272,6 @@ object SourceCode { this += highlightKeyword("while ") inParens(printTree(cond)) += " " printTree(body) - } case ddef @ DefDef(name, paramss, tpt, rhs) => printDefAnnotations(ddef) @@ 
-315,26 +292,23 @@ object SourceCode { clause match case TermParamClause(params) => printArgsDefs(params) case TypeParamClause(params) => printTargsDefs(params.zip(params)) - if (!isConstructor) { + if (!isConstructor) this += ": " printTypeTree(tpt) - } - rhs match { + rhs match case Some(tree) => this += " = " printTree(tree) case None => - } this case Wildcard() => this += "_" case tree: Ident => - splicedName(tree.symbol) match { + splicedName(tree.symbol) match case Some(name) => this += highlightTypeDef(name) case _ => printType(tree.tpe) - } case Select(qual, name) => printQualTree(qual) @@ -346,11 +320,10 @@ object SourceCode { printConstant(const) case This(id) => - id match { + id match case Some(name) => this += name.stripSuffix("$") += "." case None => - } this += "this" case tree: New => @@ -366,19 +339,17 @@ object SourceCode { printTree(expr) case Apply(fn, args) if fn.symbol == Symbol.requiredMethod("scala.quoted.runtime.quote") => - args.head match { + args.head match case Block(stats, expr) => this += "'{" - indented { + indented: this += lineBreak() printFlatBlock(stats, expr) - } this += lineBreak() += "}" case _ => this += "'{" printTree(args.head) this += "}" - } case Apply(fn, arg :: Nil) if fn.symbol == Symbol.requiredMethod("scala.quoted.runtime.splice") => this += "${" @@ -387,7 +358,7 @@ object SourceCode { case Apply(fn, args) => var argsPrefix = "" - fn match { + fn match case Select(This(_), "") => this += "this" // call to constructor inside a constructor case Select(qual, "apply") => if qual.tpe.isContextFunctionType then @@ -396,50 +367,44 @@ object SourceCode { argsPrefix += "erased " printQualTree(fn) case _ => printQualTree(fn) - } - val args1 = args match { + val args1 = args match case init :+ Typed(Repeated(Nil, _), _) => init // drop empty var args at the end case _ => args - } - inParens { + inParens: this += argsPrefix printTrees(args1, ", ") - } case TypeApply(fn, args) => printQualTree(fn) - fn match { + fn match case 
Select(New(Applied(_, _)), "") => // type bounds already printed in `fn` this case _ => inSquare(printTrees(args, ", ")) - } case Super(qual, idOpt) => - qual match { + qual match case This(Some(name)) => this += name += "." case This(None) => - } this += "super" for (id <- idOpt) inSquare(this += id) this case Typed(term, tpt) => - tpt.tpe match { + tpt.tpe match case Types.Repeated(_) => printTree(term) - term match { + term match case Repeated(_, _) | Inlined(None, Nil, Repeated(_, _)) => this case _ => this += ": " += highlightTypeDef("_*") - } case _ => - inParens { + inParens: printTree(term) this += (if (dotty.tools.dotc.util.Chars.isOperatorPart(sb.last)) " : " else ": ") - def printTypeOrAnnots(tpe: TypeRepr): Unit = tpe match { + def printTypeOrAnnots(tpe: TypeRepr): Unit = tpe match case AnnotatedType(tp, annot) if tp == term.tpe => printAnnotation(annot) case AnnotatedType(tp, annot) => @@ -448,10 +413,7 @@ object SourceCode { printAnnotation(annot) case tpe => printType(tpe) - } printTypeOrAnnots(tpt.tpe) - } - } case Assign(lhs, rhs) => printTree(lhs) @@ -459,17 +421,15 @@ object SourceCode { printTree(rhs) case tree @ Lambda(params, body) => // must come before `Block` - inParens { + inParens: printArgsDefs(params) this += (if tree.tpe.isContextFunctionType then " ?=> " else " => ") printTree(body) - } case Block(stats0, expr) => - val stats = stats0.filter { + val stats = stats0.filter: case tree: ValDef => !tree.symbol.flags.is(Flags.Module) case _ => true - } printFlatBlock(stats, expr) case Inlined(_, bindings, expansion) => @@ -495,17 +455,15 @@ object SourceCode { case Try(body, cases, finallyOpt) => this += highlightKeyword("try ") printTree(body) - if (cases.nonEmpty) { + if (cases.nonEmpty) this += highlightKeyword(" catch") inBlock(printCases(cases, lineBreak())) - } - finallyOpt match { + finallyOpt match case Some(t) => this += highlightKeyword(" finally ") printTree(t) case None => this - } case Return(expr, from) => this += "return " @@ 
-538,19 +496,17 @@ object SourceCode { case _ => throw new MatchError(tree.show(using Printer.TreeStructure)) - } - private def printQualTree(tree: Tree): this.type = tree match { + private def printQualTree(tree: Tree): this.type = tree match case _: If | _: Match | _: While | _: Try | _: Return => this += "(" printTree(tree) this += ")" case _ => printTree(tree) - } - private def flatBlock(stats: List[Statement], expr: Term): (List[Statement], Term) = { + private def flatBlock(stats: List[Statement], expr: Term): (List[Statement], Term) = val flatStats = List.newBuilder[Statement] - def extractFlatStats(stat: Statement): Unit = stat match { + def extractFlatStats(stat: Statement): Unit = stat match case Lambda(_, _) => // must come before `Block` flatStats += stat case Block(stats1, expr1) => @@ -565,8 +521,7 @@ object SourceCode { extractFlatStats(expansion) case Literal(UnitConstant()) => // ignore case stat => flatStats += stat - } - def extractFlatExpr(term: Term): Term = term match { + def extractFlatExpr(term: Term): Term = term match case Lambda(_, _) => // must come before `Block` term case Block(stats1, expr1) => @@ -580,36 +535,30 @@ object SourceCode { extractFlatStats(it.next()) extractFlatExpr(expansion) case term => term - } val it = stats.iterator while (it.hasNext) extractFlatStats(it.next()) val flatExpr = extractFlatExpr(expr) (flatStats.result(), flatExpr) - } - private def printFlatBlock(stats: List[Statement], expr: Term)(using elideThis: Option[Symbol]): this.type = { + private def printFlatBlock(stats: List[Statement], expr: Term)(using elideThis: Option[Symbol]): this.type = val (stats1, expr1) = flatBlock(stats, expr) val splicedTypeAnnot = Symbol.requiredClass("scala.quoted.runtime.SplicedType").primaryConstructor - val stats2 = stats1.filter { + val stats2 = stats1.filter: case tree: TypeDef => !tree.symbol.hasAnnotation(splicedTypeAnnot) case _ => true - } - if (stats2.isEmpty) { + if (stats2.isEmpty) printTree(expr1) - } else { + else 
this += "{" - indented { + indented: printStats(stats2, expr1) - } this += lineBreak() += "}" - } - } - private def printStats(stats: List[Tree], expr: Tree)(using eliseThis: Option[Symbol]): Unit = { - def printSeparator(next: Tree): Unit = { + private def printStats(stats: List[Tree], expr: Tree)(using eliseThis: Option[Symbol]): Unit = + def printSeparator(next: Tree): Unit = // Avoid accidental application of opening `{` on next line with a double break - def rec(next: Tree): Unit = next match { + def rec(next: Tree): Unit = next match case Lambda(_, _) => this += lineBreak() case Block(stats, _) if stats.nonEmpty => this += doubleLineBreak() case Inlined(_, bindings, _) if bindings.nonEmpty => this += doubleLineBreak() @@ -618,41 +567,33 @@ object SourceCode { case TypeApply(fn, _) => rec(fn) case Typed(_, _) => this += doubleLineBreak() case _ => this += lineBreak() - } - next match { + next match case term: Term => - flatBlock(Nil, term) match { + flatBlock(Nil, term) match case (next :: _, _) => rec(next) case (Nil, next) => rec(next) - } case _ => this += lineBreak() - } - } - def printSeparated(list: List[Tree]): Unit = list match { + def printSeparated(list: List[Tree]): Unit = list match case Nil => printTree(expr) case x :: xs => printTree(x) printSeparator(if (xs.isEmpty) expr else xs.head) printSeparated(xs) - } this += lineBreak() printSeparated(stats) - } - private def printList[T](xs: List[T], sep: String, print: T => this.type): this.type = { - def printSeparated(list: List[T]): Unit = list match { + private def printList[T](xs: List[T], sep: String, print: T => this.type): this.type = + def printSeparated(list: List[T]): Unit = list match case Nil => case x :: Nil => print(x) case x :: xs => print(x) this += sep printSeparated(xs) - } printSeparated(xs) this - } private def printTrees(trees: List[Tree], sep: String)(using elideThis: Option[Symbol]): this.type = printList(trees, sep, (t: Tree) => printTree(t)) @@ -660,122 +601,104 @@ object 
SourceCode { private def printTypeTrees(trees: List[TypeTree], sep: String)(using elideThis: Option[Symbol] = None): this.type = printList(trees, sep, (t: TypeTree) => printTypeTree(t)) - private def printTypes(trees: List[TypeRepr], sep: String)(using elideThis: Option[Symbol]): this.type = { - def printSeparated(list: List[TypeRepr]): Unit = list match { + private def printTypes(trees: List[TypeRepr], sep: String)(using elideThis: Option[Symbol]): this.type = + def printSeparated(list: List[TypeRepr]): Unit = list match case Nil => case x :: Nil => printType(x) case x :: xs => printType(x) this += sep printSeparated(xs) - } printSeparated(trees) this - } - private def printSelectors(selectors: List[Selector]): this.type = { - def printSeparated(list: List[Selector]): Unit = list match { + private def printSelectors(selectors: List[Selector]): this.type = + def printSeparated(list: List[Selector]): Unit = list match case Nil => case x :: Nil => printSelector(x) case x :: xs => printSelector(x) this += ", " printSeparated(xs) - } this += "{" printSeparated(selectors) this += "}" - } - private def printCases(cases: List[CaseDef], sep: String): this.type = { - def printSeparated(list: List[CaseDef]): Unit = list match { + private def printCases(cases: List[CaseDef], sep: String): this.type = + def printSeparated(list: List[CaseDef]): Unit = list match case Nil => case x :: Nil => printCaseDef(x) case x :: xs => printCaseDef(x) this += sep printSeparated(xs) - } printSeparated(cases) this - } - private def printTypeCases(cases: List[TypeCaseDef], sep: String): this.type = { - def printSeparated(list: List[TypeCaseDef]): Unit = list match { + private def printTypeCases(cases: List[TypeCaseDef], sep: String): this.type = + def printSeparated(list: List[TypeCaseDef]): Unit = list match case Nil => case x :: Nil => printTypeCaseDef(x) case x :: xs => printTypeCaseDef(x) this += sep printSeparated(xs) - } printSeparated(cases) this - } - private def printPatterns(cases: 
List[Tree], sep: String): this.type = { - def printSeparated(list: List[Tree]): Unit = list match { + private def printPatterns(cases: List[Tree], sep: String): this.type = + def printSeparated(list: List[Tree]): Unit = list match case Nil => case x :: Nil => printPattern(x) case x :: xs => printPattern(x) this += sep printSeparated(xs) - } printSeparated(cases) this - } - private def printTypesOrBounds(types: List[TypeRepr], sep: String)(using elideThis: Option[Symbol]): this.type = { - def printSeparated(list: List[TypeRepr]): Unit = list match { + private def printTypesOrBounds(types: List[TypeRepr], sep: String)(using elideThis: Option[Symbol]): this.type = + def printSeparated(list: List[TypeRepr]): Unit = list match case Nil => case x :: Nil => printType(x) case x :: xs => printType(x) this += sep printSeparated(xs) - } printSeparated(types) this - } - private def printTargsDefs(targs: List[(TypeDef, TypeDef)], isDef:Boolean = true)(using elideThis: Option[Symbol]): Unit = { - if (!targs.isEmpty) { - def printSeparated(list: List[(TypeDef, TypeDef)]): Unit = list match { + private def printTargsDefs(targs: List[(TypeDef, TypeDef)], isDef:Boolean = true)(using elideThis: Option[Symbol]): Unit = + if (!targs.isEmpty) + def printSeparated(list: List[(TypeDef, TypeDef)]): Unit = list match case Nil => case x :: Nil => printTargDef(x, isDef = isDef) case x :: xs => printTargDef(x, isDef = isDef) this += ", " printSeparated(xs) - } inSquare(printSeparated(targs)) - } - } - private def printTargDef(arg: (TypeDef, TypeDef), isMember: Boolean = false, isDef:Boolean = true)(using elideThis: Option[Symbol]): this.type = { + private def printTargDef(arg: (TypeDef, TypeDef), isMember: Boolean = false, isDef:Boolean = true)(using elideThis: Option[Symbol]): this.type = val (argDef, argCons) = arg - if (isDef) { - if (argDef.symbol.flags.is(Flags.Covariant)) { + if (isDef) + if (argDef.symbol.flags.is(Flags.Covariant)) this += highlightValDef("+") - } else if 
(argDef.symbol.flags.is(Flags.Contravariant)) { + else if (argDef.symbol.flags.is(Flags.Contravariant)) this += highlightValDef("-") - } - } this += argCons.name - argCons.rhs match { + argCons.rhs match case rhs: TypeBoundsTree => printBoundsTree(rhs) case rhs: WildcardTypeTree => printType(rhs.tpe) case rhs @ LambdaTypeTree(tparams, body) => - def printParam(t: Tree /*TypeTree | TypeBoundsTree*/): Unit = t match { + def printParam(t: Tree /*TypeTree | TypeBoundsTree*/): Unit = t match case t: TypeBoundsTree => printBoundsTree(t) case t: TypeTree => printTypeTree(t) - } - def printSeparated(list: List[TypeDef]): Unit = list match { + def printSeparated(list: List[TypeDef]): Unit = list match case Nil => case x :: Nil => this += x.name @@ -785,119 +708,98 @@ object SourceCode { printParam(x.rhs) this += ", " printSeparated(xs) - } inSquare(printSeparated(tparams)) - if (isMember) { - body match { + if (isMember) + body match case MatchTypeTree(Some(bound), _, _) => this += " <: " printTypeTree(bound) case _ => - } this += " = " printTypeOrBoundsTree(body) - } else this case rhs: TypeTree => this += " = " printTypeTree(rhs) - } - } - private def printArgsDefs(args: List[ValDef])(using elideThis: Option[Symbol]): Unit = { - val argFlags = args match { + private def printArgsDefs(args: List[ValDef])(using elideThis: Option[Symbol]): Unit = + val argFlags = args match case Nil => Flags.EmptyFlags case arg :: _ => arg.symbol.flags - } - if (argFlags.is(Flags.Erased | Flags.Given)) { + if (argFlags.is(Flags.Erased | Flags.Given)) if (argFlags.is(Flags.Given)) this += " given" if (argFlags.is(Flags.Erased)) this += " erased" this += " " - } - inParens { + inParens: if (argFlags.is(Flags.Implicit) && !argFlags.is(Flags.Given)) this += "implicit " - def printSeparated(list: List[ValDef]): Unit = list match { + def printSeparated(list: List[ValDef]): Unit = list match case Nil => case x :: Nil => printParamDef(x) case x :: xs => printParamDef(x) this += ", " 
printSeparated(xs) - } printSeparated(args) - } - } - private def printAnnotations(trees: List[Term])(using elideThis: Option[Symbol]): this.type = { - def printSeparated(list: List[Term]): Unit = list match { + private def printAnnotations(trees: List[Term])(using elideThis: Option[Symbol]): this.type = + def printSeparated(list: List[Term]): Unit = list match case Nil => case x :: Nil => printAnnotation(x) case x :: xs => printAnnotation(x) this += " " printSeparated(xs) - } printSeparated(trees) this - } - private def printParamDef(arg: ValDef)(using elideThis: Option[Symbol]): Unit = { + private def printParamDef(arg: ValDef)(using elideThis: Option[Symbol]): Unit = val name = splicedName(arg.symbol).getOrElse(arg.symbol.name) val sym = arg.symbol.owner if sym.isDefDef && sym.name == "" then val ClassDef(_, _, _, _, body) = sym.owner.tree: @unchecked - body.collectFirst { + body.collectFirst: case vdef @ ValDef(`name`, _, _) if vdef.symbol.flags.is(Flags.ParamAccessor) => - if (!vdef.symbol.flags.is(Flags.Local)) { + if (!vdef.symbol.flags.is(Flags.Local)) var printedPrefix = false - if (vdef.symbol.flags.is(Flags.Override)) { + if (vdef.symbol.flags.is(Flags.Override)) this += "override " printedPrefix = true - } printedPrefix |= printProtectedOrPrivate(vdef) if (vdef.symbol.flags.is(Flags.Mutable)) this += highlightValDef("var ") else if (printedPrefix || !vdef.symbol.flags.is(Flags.CaseAccessor)) this += highlightValDef("val ") - } - } end if this += highlightValDef(name) += ": " printTypeTree(arg.tpt) - } - private def printCaseDef(caseDef: CaseDef): this.type = { + private def printCaseDef(caseDef: CaseDef): this.type = this += highlightValDef("case ") printPattern(caseDef.pattern) - caseDef.guard match { + caseDef.guard match case Some(t) => this += " if " printTree(t) case None => - } this += highlightValDef(" =>") - indented { - caseDef.rhs match { + indented: + caseDef.rhs match case Block(stats, expr) => printStats(stats, expr)(using None) case body 
=> this += lineBreak() printTree(body) - } - } this - } - private def printTypeCaseDef(caseDef: TypeCaseDef): this.type = { + private def printTypeCaseDef(caseDef: TypeCaseDef): this.type = this += highlightValDef("case ") printTypeTree(caseDef.pattern) this += highlightValDef(" => ") printTypeTree(caseDef.rhs) this - } - private def printPattern(pattern: Tree): this.type = pattern match { + private def printPattern(pattern: Tree): this.type = pattern match case Wildcard() => this += "_" @@ -913,18 +815,16 @@ object SourceCode { printPattern(pattern) case Unapply(fun, implicits, patterns) => - val fun2 = fun match { + val fun2 = fun match case TypeApply(fun2, _) => fun2 case _ => fun - } - fun2 match { + fun2 match case Select(extractor, "unapply" | "unapplySeq") => printTree(extractor) case Ident("unapply" | "unapplySeq") => this += fun.symbol.owner.fullName.stripSuffix("$") case _ => throw new MatchError(fun.show(using Printer.TreeStructure)) - } inParens(printPatterns(patterns, ", ")) case Alternatives(trees) => @@ -944,12 +844,11 @@ object SourceCode { case _ => throw new MatchError(pattern.show(using Printer.TreeStructure)) - } inline private val qc = '\'' inline private val qSc = '"' - def printConstant(const: Constant): this.type = const match { + def printConstant(const: Constant): this.type = const match case UnitConstant() => this += highlightLiteral("()") case NullConstant() => this += highlightLiteral("null") case BooleanConstant(v) => this += highlightLiteral(v.toString) @@ -964,9 +863,8 @@ object SourceCode { case ClassOfConstant(v) => this += "classOf" inSquare(printType(v)) - } - private def printTypeOrBoundsTree(tpt: Tree)(using elideThis: Option[Symbol] = None): this.type = tpt match { + private def printTypeOrBoundsTree(tpt: Tree)(using elideThis: Option[Symbol] = None): this.type = tpt match case TypeBoundsTree(lo, hi) => this += "_ >: " printTypeTree(lo) @@ -976,7 +874,6 @@ object SourceCode { printType(tpt.tpe) case tpt: TypeTree => 
printTypeTree(tpt) - } /** Print type tree * @@ -986,10 +883,10 @@ object SourceCode { * Self type annotation and types in parent list should elide current class * prefix `C.this` to avoid type checking errors. */ - private def printTypeTree(tree: TypeTree)(using elideThis: Option[Symbol] = None): this.type = tree match { + private def printTypeTree(tree: TypeTree)(using elideThis: Option[Symbol] = None): this.type = tree match case Inferred() => // TODO try to move this logic into `printType` - def printTypeAndAnnots(tpe: TypeRepr): this.type = tpe match { + def printTypeAndAnnots(tpe: TypeRepr): this.type = tpe match case AnnotatedType(tp, annot) => printTypeAndAnnots(tp) this += " " @@ -1007,7 +904,6 @@ object SourceCode { printType(tpe) this += ".type" case tpe => printType(tpe) - } printTypeAndAnnots(tree.tpe) case TypeIdent(name) => @@ -1021,10 +917,9 @@ object SourceCode { case Singleton(ref) => printTree(ref) - ref match { + ref match case Literal(_) => this case _ => this += ".type" - } case Refined(tpt, refinements) => printTypeTree(tpt) @@ -1036,7 +931,7 @@ object SourceCode { case Annotated(tpt, annot) => val Annotation(ref, args) = annot: @unchecked - ref.tpe match { + ref.tpe match case tpe: TypeRef if tpe.typeSymbol == Symbol.requiredClass("scala.annotation.internal.Repeated") => val Types.Sequence(tp) = tpt.tpe: @unchecked printType(tp) @@ -1045,7 +940,6 @@ object SourceCode { printTypeTree(tpt) this += " " printAnnotation(annot) - } case MatchTypeTree(bound, selector, cases) => printTypeTree(selector) @@ -1070,7 +964,6 @@ object SourceCode { case _ => throw new MatchError(tree.show(using Printer.TreeStructure)) - } /** Print type * @@ -1080,22 +973,21 @@ object SourceCode { * Self type annotation and types in parent list should elide current class * prefix `C.this` to avoid type checking errors. 
*/ - def printType(tpe: TypeRepr)(using elideThis: Option[Symbol] = None): this.type = tpe match { + def printType(tpe: TypeRepr)(using elideThis: Option[Symbol] = None): this.type = tpe match case ConstantType(const) => printConstant(const) case tpe: TypeRef => val sym = tpe.typeSymbol if fullNames then - tpe.qualifier match { + tpe.qualifier match case ThisType(tp) if tp.typeSymbol == defn.RootClass || tp.typeSymbol == defn.EmptyPackageClass => case NoPrefix() => - if (sym.owner.flags.is(Flags.Package)) { + if (sym.owner.flags.is(Flags.Package)) // TODO should these be in the prefix? These are at least `scala`, `java` and `scala.collection`. val packagePath = sym.owner.fullName.stripPrefix("").stripPrefix("").stripPrefix(".") if (packagePath != "") this += packagePath += "." - } case prefix: TermRef if prefix.termSymbol.isClassDef => printType(prefix) this += "#" @@ -1107,12 +999,11 @@ object SourceCode { case prefix: TypeRepr => printType(prefix) this += "." - } this += highlightTypeDef(sym.name.stripSuffix("$")) case TermRef(prefix, name) => if fullNames then - prefix match { + prefix match case NoPrefix() => this += highlightTypeDef(name) case ThisType(tp) if tp.typeSymbol == defn.RootClass || tp.typeSymbol == defn.EmptyPackageClass => @@ -1122,7 +1013,6 @@ object SourceCode { if (name != "package") this += "." 
+= highlightTypeDef(name) this - } else this += highlightTypeDef(name) @@ -1130,7 +1020,7 @@ object SourceCode { printRefinement(tpe) case AppliedType(tp, args) => - tp match { + tp match case tp: TypeLambda => this += "(" printType(tp) @@ -1141,7 +1031,6 @@ object SourceCode { case _ => printType(tp) inSquare(printTypesOrBounds(args, ", ")) - } case AnnotatedType(tp, annot) => val Annotation(ref, args) = annot: @unchecked @@ -1169,24 +1058,21 @@ object SourceCode { printType(tp) case ThisType(tp) => - tp match { + tp match case tp: TypeRef if !tp.typeSymbol.flags.is(Flags.Module) => printFullClassName(tp) this += highlightTypeDef(".this") case TypeRef(prefix, name) if name.endsWith("$") => - if (fullNames){ - prefix match { + if (fullNames) + prefix match case NoPrefix() => case ThisType(tp) if tp.typeSymbol == defn.RootClass || tp.typeSymbol == defn.EmptyPackageClass => case _ => printType(prefix) this += "." - } - } this += highlightTypeDef(name.stripSuffix("$")) case _ => printType(tp) - } case SuperType(thistpe, supertpe) => printType(supertpe) @@ -1198,11 +1084,10 @@ object SourceCode { printType(body) case ParamRef(lambda, idx) => - lambda match { + lambda match case MethodType(params, _, _) => this += params(idx) case PolyType(params, _, _) => this += params(idx) case TypeLambda(params, _, _) => this += params(idx) - } case RecursiveType(tpe) => printType(tpe) @@ -1238,9 +1123,8 @@ object SourceCode { case _ => throw new MatchError(tpe.show(using Printer.TypeReprStructure)) - } - private def printSelector(sel: Selector): this.type = sel match { + private def printSelector(sel: Selector): this.type = sel match case SimpleSelector(name) => this += name case OmitSelector(name) => this += name += " => _" case RenameSelector(name, newName) => this += name += " => " += newName @@ -1251,16 +1135,14 @@ object SourceCode { printTree(tpt) case _ => this += "given" - } - private def printDefinitionName(tree: Definition): this.type = tree match { + private def 
printDefinitionName(tree: Definition): this.type = tree match case ValDef(name, _, _) => this += highlightValDef(name) case DefDef(name, _, _, _) => this += highlightValDef(name) case ClassDef(name, _, _, _, _) => this += highlightTypeDef(name.stripSuffix("$")) case TypeDef(name, _) => this += highlightTypeDef(name) - } - private def printAnnotation(annot: Term)(using elideThis: Option[Symbol]): this.type = { + private def printAnnotation(annot: Term)(using elideThis: Option[Symbol]): this.type = val Annotation(ref, args) = annot: @unchecked this += "@" printTypeTree(ref) @@ -1268,23 +1150,20 @@ object SourceCode { this else inParens(printTrees(args, ", ")) - } - private def printDefAnnotations(definition: Definition)(using elideThis: Option[Symbol]): this.type = { - val annots = definition.symbol.annotations.filter { + private def printDefAnnotations(definition: Definition)(using elideThis: Option[Symbol]): this.type = + val annots = definition.symbol.annotations.filter: case Annotation(annot, _) => val sym = annot.tpe.typeSymbol sym != Symbol.requiredClass("scala.forceInline") && sym.maybeOwner != Symbol.requiredPackage("scala.annotation.internal") case x => throw new MatchError(x.show(using Printer.TreeStructure)) - } printAnnotations(annots) if (annots.nonEmpty) this += " " else this - } - private def printRefinement(tpe: TypeRepr)(using elideThis: Option[Symbol]): this.type = { - def printMethodicType(tp: TypeRepr): Unit = tp match { + private def printRefinement(tpe: TypeRepr)(using elideThis: Option[Symbol]): this.type = + def printMethodicType(tp: TypeRepr): Unit = tp match case tp @ MethodType(paramNames, params, res) => inParens(printMethodicTypeParams(paramNames, params)) printMethodicType(res) @@ -1297,13 +1176,12 @@ object SourceCode { case tp: TypeRepr => this += ": " printType(tp) - } - def rec(tp: TypeRepr): Unit = tp match { + def rec(tp: TypeRepr): Unit = tp match case Refinement(parent, name, info) => rec(parent) - indented { + indented: this += 
lineBreak() - info match { + info match case info: TypeBounds => this += highlightKeyword("type ") += highlightTypeDef(name) printBounds(info) @@ -1313,24 +1191,19 @@ object SourceCode { case info: TypeRepr => this += highlightKeyword("val ") += highlightValDef(name) printMethodicType(info) - } - } case tp => printType(tp) this += " {" - } rec(tpe) this += lineBreak() += "}" - } - private def printMethodicTypeParams(paramNames: List[String], params: List[TypeRepr])(using elideThis: Option[Symbol]): Unit = { - def printInfo(info: TypeRepr) = info match { + private def printMethodicTypeParams(paramNames: List[String], params: List[TypeRepr])(using elideThis: Option[Symbol]): Unit = + def printInfo(info: TypeRepr) = info match case info: TypeBounds => printBounds(info) case info: TypeRepr => this += ": " printType(info) - } - def printSeparated(list: List[(String, TypeRepr)]): Unit = list match { + def printSeparated(list: List[(String, TypeRepr)]): Unit = list match case Nil => case (name, info) :: Nil => this += name @@ -1340,75 +1213,62 @@ object SourceCode { printInfo(info) this += ", " printSeparated(xs) - } printSeparated(paramNames.zip(params)) - } - private def printBoundsTree(bounds: TypeBoundsTree)(using elideThis: Option[Symbol]): this.type = { + private def printBoundsTree(bounds: TypeBoundsTree)(using elideThis: Option[Symbol]): this.type = if bounds.low.tpe == bounds.hi.tpe then this += " = " printTypeTree(bounds.low) else - bounds.low match { + bounds.low match case Inferred() => case low => this += " >: " printTypeTree(low) - } - bounds.hi match { + bounds.hi match case Inferred() => this case hi => this += " <: " printTypeTree(hi) - } - } - private def printBounds(bounds: TypeBounds)(using elideThis: Option[Symbol]): this.type = { + private def printBounds(bounds: TypeBounds)(using elideThis: Option[Symbol]): this.type = this += " >: " printType(bounds.low) this += " <: " printType(bounds.hi) - } - private def printProtectedOrPrivate(definition: 
Definition): Boolean = { + private def printProtectedOrPrivate(definition: Definition): Boolean = var prefixWasPrinted = false - def printWithin(within: TypeRepr) = within match { + def printWithin(within: TypeRepr) = within match case TypeRef(_, name) => this += name case _ => printFullClassName(within) - } - if (definition.symbol.flags.is(Flags.Protected)) { + if (definition.symbol.flags.is(Flags.Protected)) this += highlightKeyword("protected") - definition.symbol.protectedWithin match { + definition.symbol.protectedWithin match case Some(within) => inSquare(printWithin(within)) case _ => - } prefixWasPrinted = true - } else { - definition.symbol.privateWithin match { + else + definition.symbol.privateWithin match case Some(within) => this += highlightKeyword("private") inSquare(printWithin(within)) prefixWasPrinted = true case _ => - } - } if (prefixWasPrinted) this += " " prefixWasPrinted - } - private def printFullClassName(tp: TypeRepr): Unit = { - def printClassPrefix(prefix: TypeRepr): Unit = prefix match { + private def printFullClassName(tp: TypeRepr): Unit = + def printClassPrefix(prefix: TypeRepr): Unit = prefix match case TypeRef(prefix2, name) if fullNames => printClassPrefix(prefix2) this += name += "." 
case _ => - } val TypeRef(prefix, name) = tp: @unchecked printClassPrefix(prefix) this += name - } private def +=(x: Boolean): this.type = { sb.append(x); this } private def +=(x: Byte): this.type = { sb.append(x); this } @@ -1420,7 +1280,7 @@ object SourceCode { private def +=(x: Char): this.type = { sb.append(x); this } private def +=(x: String): this.type = { sb.append(x); this } - private def escapedChar(ch: Char): String = (ch: @switch) match { + private def escapedChar(ch: Char): String = (ch: @switch) match case '\b' => "\\b" case '\t' => "\\t" case '\n' => "\\n" @@ -1430,16 +1290,15 @@ object SourceCode { case '\'' => "\\\'" case '\\' => "\\\\" case _ => if ch.isControl then f"${"\\"}u${ch.toInt}%04x" else String.valueOf(ch).nn - } private def escapedString(str: String): String = str flatMap escapedChar private[this] val names = collection.mutable.Map.empty[Symbol, String] private[this] val namesIndex = collection.mutable.Map.empty[String, Int] - private def splicedName(sym: Symbol): Option[String] = { + private def splicedName(sym: Symbol): Option[String] = if sym.owner.isClassDef then None - else names.get(sym).orElse { + else names.get(sym).orElse: val name0 = sym.name val index = namesIndex.getOrElse(name0, 1) namesIndex(name0) = index + 1 @@ -1448,56 +1307,40 @@ object SourceCode { else s"`$name0${index.toString.toCharArray.nn.map {x => (x - '0' + '₀').toChar}.mkString}`" names(sym) = name Some(name) - } - } - private object SpecialOp { - def unapply(arg: Tree): Option[(String, List[Term])] = arg match { + private object SpecialOp: + def unapply(arg: Tree): Option[(String, List[Term])] = arg match case arg @ Apply(fn, args) => - fn.tpe match { + fn.tpe match case tpe @ TermRef(ThisType(TypeRef(_, name)), name2) if name == "" => Some((name2, args)) case _ => None - } case _ => None - } - } - private object Annotation { - def unapply(arg: Tree): Option[(TypeTree, List[Term])] = arg match { + private object Annotation: + def unapply(arg: Tree): 
Option[(TypeTree, List[Term])] = arg match case New(annot) => Some((annot, Nil)) case Apply(Select(New(annot), ""), args) => Some((annot, args)) case Apply(TypeApply(Select(New(annot), ""), targs), args) => Some((annot, args)) case _ => None - } - } // TODO Provide some of these in scala.tasty.Reflection.scala and implement them using checks on symbols for performance - private object Types { + private object Types: - object Sequence { - def unapply(tpe: TypeRepr): Option[TypeRepr] = tpe match { + object Sequence: + def unapply(tpe: TypeRepr): Option[TypeRepr] = tpe match case AppliedType(seq, (tp: TypeRepr) :: Nil) if seq.typeSymbol == Symbol.requiredClass("scala.collection.Seq") || seq.typeSymbol == Symbol.requiredClass("scala.collection.immutable.Seq") => Some(tp) case _ => None - } - } - object Repeated { - def unapply(tpe: TypeRepr): Option[TypeRepr] = tpe match { + object Repeated: + def unapply(tpe: TypeRepr): Option[TypeRepr] = tpe match case AppliedType(rep, (tp: TypeRepr) :: Nil) if rep.typeSymbol == Symbol.requiredClass("scala.") => Some(tp) case _ => None - } - } - } - private object PackageObject { - def unapply(tree: Tree): Option[Tree] = tree match { + private object PackageObject: + def unapply(tree: Tree): Option[Tree] = tree match case PackageClause(_, ValDef("package", _, _) :: body :: Nil) => Some(body) case _ => None - } - } - } -} diff --git a/compiler/src/scala/quoted/runtime/impl/printers/SyntaxHighlight.scala b/compiler/src/scala/quoted/runtime/impl/printers/SyntaxHighlight.scala index cc3ecc2b153a..74d037e29301 100644 --- a/compiler/src/scala/quoted/runtime/impl/printers/SyntaxHighlight.scala +++ b/compiler/src/scala/quoted/runtime/impl/printers/SyntaxHighlight.scala @@ -1,7 +1,7 @@ package scala.quoted package runtime.impl.printers -trait SyntaxHighlight { +trait SyntaxHighlight: def highlightKeyword(str: String): String def highlightTypeDef(str: String): String def highlightLiteral(str: String): String @@ -10,11 +10,10 @@ trait 
SyntaxHighlight { def highlightAnnotation(str: String): String def highlightString(str: String): String def highlightTripleQs: String -} -object SyntaxHighlight { +object SyntaxHighlight: - def ANSI: SyntaxHighlight = new SyntaxHighlight { + def ANSI: SyntaxHighlight = new SyntaxHighlight: // Keep in sync with SyntaxHighlighting private val NoColor = Console.RESET private val CommentColor = Console.BLUE @@ -33,9 +32,8 @@ object SyntaxHighlight { def highlightAnnotation(str: String): String = AnnotationColor + str + NoColor def highlightString(str: String): String = StringColor + str + NoColor def highlightTripleQs: String = Console.RED_B + "???" + NoColor - } - def plain: SyntaxHighlight = new SyntaxHighlight { + def plain: SyntaxHighlight = new SyntaxHighlight: def highlightKeyword(str: String): String = str def highlightTypeDef(str: String): String = str def highlightLiteral(str: String): String = str @@ -44,5 +42,3 @@ object SyntaxHighlight { def highlightAnnotation(str: String): String = str def highlightString(str: String): String = str def highlightTripleQs: String = "???" - } -}