diff --git a/compiler/src/dotty/tools/dotc/ast/Desugar.scala b/compiler/src/dotty/tools/dotc/ast/Desugar.scala
index 91627864ac64..121806dd36ed 100644
--- a/compiler/src/dotty/tools/dotc/ast/Desugar.scala
+++ b/compiler/src/dotty/tools/dotc/ast/Desugar.scala
@@ -39,6 +39,11 @@ object desugar {
*/
val CheckIrrefutable: Property.Key[MatchCheck] = Property.StickyKey()
+ /** A multi-line infix operation with the infix operator starting a new line.
+ * Used for explaining potential errors.
+ */
+ val MultiLineInfix: Property.Key[Unit] = Property.StickyKey()
+
/** What static check should be applied to a Match? */
enum MatchCheck {
case None, Exhaustive, IrrefutablePatDef, IrrefutableGenFrom
@@ -1194,7 +1199,10 @@ object desugar {
case Tuple(args) => args.mapConserve(assignToNamedArg)
case _ => arg :: Nil
}
- Apply(Select(fn, op.name).withSpan(selectPos), args)
+ val sel = Select(fn, op.name).withSpan(selectPos)
+ if (left.sourcePos.endLine < op.sourcePos.startLine)
+ sel.pushAttachment(MultiLineInfix, ())
+ Apply(sel, args)
}
if (isLeftAssoc(op.name))
diff --git a/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala b/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala
index c54bd5b08b6b..009a4e44a91e 100644
--- a/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala
+++ b/compiler/src/dotty/tools/dotc/config/ScalaSettings.scala
@@ -48,6 +48,9 @@ class ScalaSettings extends Settings.SettingGroup {
val silentWarnings: Setting[Boolean] = BooleanSetting("-nowarn", "Silence all warnings.") withAbbreviation "--no-warnings"
val fromTasty: Setting[Boolean] = BooleanSetting("-from-tasty", "Compile classes from tasty in classpath. The arguments are used as class names.") withAbbreviation "--from-tasty"
+ val newSyntax: Setting[Boolean] = BooleanSetting("-new-syntax", "Require `then` and `do` in control expressions")
+ val oldSyntax: Setting[Boolean] = BooleanSetting("-old-syntax", "Require `(...)` around conditions")
+
/** Decompiler settings */
val printTasty: Setting[Boolean] = BooleanSetting("-print-tasty", "Prints the raw tasty.") withAbbreviation "--print-tasty"
val printLines: Setting[Boolean] = BooleanSetting("-print-lines", "Show source code line numbers.") withAbbreviation "--print-lines"
diff --git a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala
index 7dd5ccf429e0..b41f44c47c58 100644
--- a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala
+++ b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala
@@ -22,7 +22,7 @@ import Constants._
import Symbols.defn
import ScriptParsers._
import Decorators._
-import scala.internal.Chars.isIdentifierStart
+import scala.internal.Chars
import scala.annotation.{tailrec, switch}
import rewrites.Rewrites.patch
@@ -351,6 +351,31 @@ object Parsers {
accept(SEMI)
}
+ def rewriteNotice(additionalOption: String = "") = {
+ val optionStr = if (additionalOption.isEmpty) "" else " " ++ additionalOption
+ i"\nThis construct can be rewritten automatically under$optionStr -rewrite."
+ }
+
+ def syntaxVersionError(option: String, span: Span) = {
+ syntaxError(em"""This construct is not allowed under $option.${rewriteNotice(option)}""", span)
+ }
+
+ def rewriteToNewSyntax(span: Span = Span(in.offset)): Boolean = {
+ if (in.newSyntax) {
+ if (in.rewrite) return true
+ syntaxVersionError("-new-syntax", span)
+ }
+ false
+ }
+
+ def rewriteToOldSyntax(span: Span = Span(in.offset)): Boolean = {
+ if (in.oldSyntax) {
+ if (in.rewrite) return true
+ syntaxVersionError("-old-syntax", span)
+ }
+ false
+ }
+
def errorTermTree: Literal = atSpan(in.offset) { Literal(Constant(null)) }
private[this] var inFunReturnType = false
@@ -525,6 +550,131 @@ object Parsers {
def commaSeparated[T](part: () => T): List[T] = tokenSeparated(COMMA, part)
+ def inSepRegion[T](opening: Token, closing: Token)(op: => T): T = {
+ in.adjustSepRegions(opening)
+ try op finally in.adjustSepRegions(closing)
+ }
+
+/* -------- REWRITES ----------------------------------------------------------- */
+
+ /** A list of pending patches, to be issued if we can rewrite all enclosing braces to
+ * indentation regions.
+ */
+ var pendingPatches: List[() => Unit] = Nil
+
+ def testChar(idx: Int, p: Char => Boolean): Boolean = {
+ val txt = source.content
+ idx < txt.length && p(txt(idx))
+ }
+
+ def testChar(idx: Int, c: Char): Boolean = {
+ val txt = source.content
+ idx < txt.length && txt(idx) == c
+ }
+
+ def testChars(from: Int, str: String): Boolean =
+ str.isEmpty ||
+ testChar(from, str.head) && testChars(from + 1, str.tail)
+
+ def skipBlanks(idx: Int, step: Int = 1): Int =
+ if (testChar(idx, c => c == ' ' || c == '\t' || c == Chars.CR)) skipBlanks(idx + step, step)
+ else idx
+
+ def skipLineCommentsRightOf(idx: Int, column: Int): Int = {
+ val j = skipBlanks(idx)
+ if (testChar(j, '/') && testChar(j + 1, '/') && source.column(j) > column)
+ skipLineCommentsRightOf(source.nextLine(j), column)
+ else idx
+ }
+
+ /** The region to eliminate when replacing a closing `)` or `}` that starts
+ * a new line
+ */
+ def closingElimRegion(): (Offset, Offset) = {
+ val skipped = skipBlanks(in.lastOffset)
+ if (testChar(skipped, Chars.LF)) // if `}` is on a line by itself
+ (source.startOfLine(in.lastOffset), skipped + 1) // skip the whole line
+ else // else
+ (in.lastOffset - 1, skipped) // move the following text up to where the `}` was
+ }
+
+ /** Drop (...) or { ... }, replacing the closing element with `endStr` */
+ def dropParensOrBraces(start: Offset, endStr: String): Unit = {
+ patch(source, Span(start, start + 1),
+ if (testChar(start - 1, Chars.isIdentifierPart)) " " else "")
+ val closingStartsLine = testChar(skipBlanks(in.lastOffset - 2, -1), Chars.LF)
+ val preFill = if (closingStartsLine || endStr.isEmpty) "" else " "
+ val postFill = if (in.lastOffset == in.offset) " " else ""
+ val (startClosing, endClosing) =
+ if (closingStartsLine && endStr.isEmpty) closingElimRegion()
+ else (in.lastOffset - 1, in.lastOffset)
+ patch(source, Span(startClosing, endClosing), s"$preFill$endStr$postFill")
+ }
+
+ /** Drop current token, which is assumed to be `then` or `do`. */
+ def dropTerminator(): Unit = {
+ var startOffset = in.offset
+ var endOffset = in.lastCharOffset
+ if (in.isAfterLineEnd()) {
+ if (testChar(endOffset, ' ')) endOffset += 1
+ }
+ else {
+ if (testChar(startOffset - 1, ' ')) startOffset -= 1
+ }
+ patch(source, Span(startOffset, endOffset), "")
+ }
+
+ /** rewrite code with (...) around the source code of `t` */
+ def revertToParens(t: Tree): Unit =
+ if (t.span.exists) {
+ patch(source, t.span.startPos, "(")
+ patch(source, t.span.endPos, ")")
+ dropTerminator()
+ }
+
+ /** In the tokens following the current one, does `query` precede any of the tokens that
+ * - must start a statement, or
+ * - separate two statements, or
* - continue a statement (e.g. `else`, `catch`)?
+ */
+ def followedByToken(query: Token): Boolean = {
+ val lookahead = in.lookaheadScanner
+ var braces = 0
+ while (true) {
+ val token = lookahead.token
+ if (braces == 0) {
+ if (token == query) return true
+ if (stopScanTokens.contains(token) || lookahead.token == RBRACE) return false
+ }
+ else if (token == EOF)
+ return false
+ else if (lookahead.token == RBRACE)
+ braces -= 1
+ if (lookahead.token == LBRACE) braces += 1
+ lookahead.nextToken()
+ }
+ false
+ }
+
+ /** Are the generators of a for-expression enclosed in (...)? */
+ def parensEncloseGenerators: Boolean = {
+ val lookahead = in.lookaheadScanner
+ var parens = 1
+ lookahead.nextToken()
+ while (parens != 0 && lookahead.token != EOF) {
+ val token = lookahead.token
+ if (token == LPAREN) parens += 1
+ else if (token == RPAREN) parens -= 1
+ lookahead.nextToken()
+ }
+ if (lookahead.token == LARROW)
+ false // it's a pattern
+ else if (lookahead.token != IDENTIFIER && lookahead.token != BACKQUOTED_IDENT)
+ true // it's not a pattern since token cannot be an infix operator
+ else
+ followedByToken(LARROW) // `<-` comes before possible statement starts
+ }
+
/* --------- OPERAND/OPERATOR STACK --------------------------------------- */
var opStack: List[OpInfo] = Nil
@@ -758,7 +908,7 @@ object Parsers {
}
else atSpan(negOffset) {
if (in.token == QUOTEID) {
- if ((staged & StageKind.Spliced) != 0 && isIdentifierStart(in.name(0))) {
+ if ((staged & StageKind.Spliced) != 0 && Chars.isIdentifierStart(in.name(0))) {
val t = atSpan(in.offset + 1) {
val tok = in.toToken(in.name)
tok match {
@@ -844,7 +994,7 @@ object Parsers {
def newLineOptWhenFollowedBy(token: Int): Unit = {
// note: next is defined here because current == NEWLINE
- if (in.token == NEWLINE && in.next.token == token) newLineOpt()
+ if (in.token == NEWLINE && in.next.token == token) in.nextToken()
}
def newLineOptWhenFollowing(p: Int => Boolean): Unit = {
@@ -1235,11 +1385,22 @@ object Parsers {
def condExpr(altToken: Token): Tree = {
if (in.token == LPAREN) {
- val t = atSpan(in.offset) { Parens(inParens(exprInParens())) }
- if (in.token == altToken) in.nextToken()
+ var t: Tree = atSpan(in.offset) { Parens(inParens(exprInParens())) }
+ if (in.token != altToken && followedByToken(altToken))
+ t = inSepRegion(LPAREN, RPAREN) {
+ newLineOpt()
+ expr1Rest(postfixExprRest(simpleExprRest(t)), Location.ElseWhere)
+ }
+ if (in.token == altToken) {
+ if (rewriteToOldSyntax()) revertToParens(t)
+ in.nextToken()
+ }
+ else if (rewriteToNewSyntax(t.span))
+ dropParensOrBraces(t.span.start, s"${tokenString(altToken)}")
t
} else {
- val t = expr()
+ val t = inSepRegion(LPAREN, RPAREN)(expr())
+ if (rewriteToOldSyntax(t.span.startPos)) revertToParens(t)
accept(altToken)
t
}
@@ -1333,7 +1494,7 @@ object Parsers {
in.errorOrMigrationWarning(
i"""`do
while ' is no longer supported,
|use `while ({ ; }) ()' instead.
- |The statement can be rewritten automatically under -language:Scala2 -migration -rewrite.
+ |${rewriteNotice("-language:Scala2")}
""")
val start = in.skipToken()
atSpan(start) {
@@ -1342,7 +1503,7 @@ object Parsers {
val whileStart = in.offset
accept(WHILE)
val cond = expr()
- if (ctx.settings.migration.value) {
+ if (in.isScala2Mode) {
patch(source, Span(start, start + 2), "while ({")
patch(source, Span(whileStart, whileStart + 5), ";")
cond match {
@@ -1576,8 +1737,10 @@ object Parsers {
* | InfixExpr id [nl] InfixExpr
* | InfixExpr ‘given’ (InfixExpr | ParArgumentExprs)
*/
- def postfixExpr(): Tree =
- infixOps(prefixExpr(), canStartExpressionTokens, prefixExpr, maybePostfix = true)
+ def postfixExpr(): Tree = postfixExprRest(prefixExpr())
+
+ def postfixExprRest(t: Tree): Tree =
+ infixOps(t, canStartExpressionTokens, prefixExpr, maybePostfix = true)
/** PrefixExpr ::= [`-' | `+' | `~' | `!'] SimpleExpr
*/
@@ -1799,8 +1962,13 @@ object Parsers {
def enumerators(): List[Tree] = generator() :: enumeratorsRest()
def enumeratorsRest(): List[Tree] =
- if (isStatSep) { in.nextToken(); enumerator() :: enumeratorsRest() }
- else if (in.token == IF) guard() :: enumeratorsRest()
+ if (isStatSep) {
+ in.nextToken()
+ if (in.token == DO || in.token == YIELD || in.token == RBRACE) Nil
+ else enumerator() :: enumeratorsRest()
+ }
+ else if (in.token == IF)
+ guard() :: enumeratorsRest()
else Nil
/** Enumerator ::= Generator
@@ -1838,13 +2006,16 @@ object Parsers {
*/
def forExpr(): Tree = atSpan(in.skipToken()) {
var wrappedEnums = true
+ val start = in.offset
+ val forEnd = in.lastOffset
+ val leading = in.token
val enums =
- if (in.token == LBRACE) inBraces(enumerators())
- else if (in.token == LPAREN) {
- val lparenOffset = in.skipToken()
- openParens.change(LPAREN, 1)
+ if (leading == LBRACE || leading == LPAREN && parensEncloseGenerators) {
+ in.nextToken()
+ openParens.change(leading, 1)
val res =
- if (in.token == CASE) enumerators()
+ if (leading == LBRACE || in.token == CASE)
+ enumerators()
else {
val pats = patternsOpt()
val pat =
@@ -1852,23 +2023,55 @@ object Parsers {
wrappedEnums = false
accept(RPAREN)
openParens.change(LPAREN, -1)
- atSpan(lparenOffset) { makeTupleOrParens(pats) } // note: alternatives `|' need to be weeded out by typer.
+ atSpan(start) { makeTupleOrParens(pats) } // note: alternatives `|' need to be weeded out by typer.
}
else pats.head
generatorRest(pat, casePat = false) :: enumeratorsRest()
}
if (wrappedEnums) {
- accept(RPAREN)
- openParens.change(LPAREN, -1)
+ val closingOnNewLine = in.isAfterLineEnd()
+ accept(leading + 1)
+ openParens.change(leading, -1)
+ def hasMultiLineEnum =
+ res.exists { t =>
+ val pos = t.sourcePos
+ pos.startLine < pos.endLine
+ }
+ if (rewriteToNewSyntax(Span(start)) && (leading == LBRACE || !hasMultiLineEnum)) {
+ // Don't rewrite if that could change meaning of newlines
+ newLinesOpt()
+ dropParensOrBraces(start, if (in.token == YIELD || in.token == DO) "" else "do")
+ }
}
res
- } else {
+ }
+ else {
wrappedEnums = false
- enumerators()
+
+ /*if (in.token == INDENT) inBracesOrIndented(enumerators()) else*/
+ val ts = inSepRegion(LBRACE, RBRACE)(enumerators())
+ if (rewriteToOldSyntax(Span(start)) && ts.nonEmpty) {
+ if (ts.length > 1 && ts.head.sourcePos.startLine != ts.last.sourcePos.startLine) {
+ patch(source, Span(forEnd), " {")
+ patch(source, Span(in.offset), "} ")
+ }
+ else {
+ patch(source, ts.head.span.startPos, "(")
+ patch(source, ts.last.span.endPos, ")")
+ }
+ }
+ ts
}
newLinesOpt()
- if (in.token == YIELD) { in.nextToken(); ForYield(enums, expr()) }
- else if (in.token == DO) { in.nextToken(); ForDo(enums, expr()) }
+ if (in.token == YIELD) {
+ in.nextToken()
+ ForYield(enums, expr())
+ }
+ else if (in.token == DO) {
+ if (rewriteToOldSyntax()) dropTerminator()
+ in.nextToken()
+ ForDo(enums, expr())
+ }
else {
if (!wrappedEnums) syntaxErrorOrIncomplete(YieldOrDoExpectedInForComprehension())
ForDo(enums, expr())
@@ -2675,7 +2878,7 @@ object Parsers {
}
/** ConstrExpr ::= SelfInvocation
- * | ConstrBlock
+ * | `{' SelfInvocation {semi BlockStat} `}'
*/
def constrExpr(): Tree =
if (in.token == LBRACE) constrBlock()
diff --git a/compiler/src/dotty/tools/dotc/parsing/Scanners.scala b/compiler/src/dotty/tools/dotc/parsing/Scanners.scala
index 78bed73dce3a..b5a8c7686597 100644
--- a/compiler/src/dotty/tools/dotc/parsing/Scanners.scala
+++ b/compiler/src/dotty/tools/dotc/parsing/Scanners.scala
@@ -219,6 +219,13 @@ object Scanners {
class Scanner(source: SourceFile, override val startFrom: Offset = 0)(implicit ctx: Context) extends ScannerCommon(source)(ctx) {
val keepComments: Boolean = !ctx.settings.YdropComments.value
+ /** A switch whether operators at the start of lines can be infix operators */
+ private var allowLeadingInfixOperators = true
+
+ val rewrite = ctx.settings.rewrite.value.isDefined
+ val oldSyntax = ctx.settings.oldSyntax.value
+ val newSyntax = ctx.settings.newSyntax.value
+
/** All doc comments kept by their end position in a `Map` */
private[this] var docstringMap: SortedMap[Int, Comment] = SortedMap.empty
@@ -233,7 +240,7 @@ object Scanners {
def nextPos: Int = (lookahead.getc(): @switch) match {
case ' ' | '\t' => nextPos
case CR | LF | FF =>
- // if we encounter line delimitng whitespace we don't count it, since
+ // if we encounter line delimiting whitespace we don't count it, since
// it seems not to affect positions in source
nextPos - 1
case _ => lookahead.charOffset - 1
@@ -265,12 +272,12 @@ object Scanners {
else IDENTIFIER
}
- private class TokenData0 extends TokenData
+ def newTokenData: TokenData = new TokenData {}
/** We need one token lookahead and one token history
*/
- val next : TokenData = new TokenData0
- private val prev : TokenData = new TokenData0
+ val next = newTokenData
+ private val prev = newTokenData
/** a stack of tokens which indicates whether line-ends can be statement separators
* also used for keeping track of nesting levels.
@@ -378,6 +385,30 @@ object Scanners {
next.token = EMPTY
}
+ def insertNL(nl: Token): Unit = {
+ next.copyFrom(this)
+ // todo: make offset line-end of previous line?
+ offset = if (lineStartOffset <= offset) lineStartOffset else lastLineStartOffset
+ token = nl
+ }
+
+
+ /** A leading symbolic or backquoted identifier is treated as an infix operator
+ * if it is followed by at least one ' ' and a token on the same line
+ * that can start an expression.
+ */
+ def isLeadingInfixOperator =
+ allowLeadingInfixOperators &&
+ (token == BACKQUOTED_IDENT ||
+ token == IDENTIFIER && isOperatorPart(name(name.length - 1))) &&
+ (ch == ' ') && {
+ val lookahead = lookaheadScanner
+ lookahead.allowLeadingInfixOperators = false
+ // force a NEWLINE after the current token if it is on its own line
+ lookahead.nextToken()
+ canStartExpressionTokens.contains(lookahead.token)
+ }
+
/** Insert NEWLINE or NEWLINES if
* - we are after a newline
* - we are within a { ... } or on toplevel (wrt sepRegions)
@@ -389,10 +420,15 @@ object Scanners {
(canStartStatTokens contains token) &&
(sepRegions.isEmpty || sepRegions.head == RBRACE ||
sepRegions.head == ARROW && token == CASE)) {
- next copyFrom this
- // todo: make offset line-end of previous line?
- offset = if (lineStartOffset <= offset) lineStartOffset else lastLineStartOffset
- token = if (pastBlankLine()) NEWLINES else NEWLINE
+ if (pastBlankLine())
+ insertNL(NEWLINES)
+ else if (!isLeadingInfixOperator)
+ insertNL(NEWLINE)
+ else if (isScala2Mode || oldSyntax)
+ ctx.warning(em"""Line starts with an operator;
+ |it is now treated as a continuation of the expression on the previous line,
+ |not as a separate statement.""",
+ source.atSpan(Span(offset)))
}
postProcessToken()
@@ -1087,8 +1123,6 @@ object Scanners {
case _ => showToken(token)
}
-// (does not seem to be needed) def flush = { charOffset = offset; nextChar(); this }
-
/* Resume normal scanning after XML */
def resume(lastToken: Token): Unit = {
token = lastToken
diff --git a/compiler/src/dotty/tools/dotc/parsing/Tokens.scala b/compiler/src/dotty/tools/dotc/parsing/Tokens.scala
index 0e7d9754fcde..4f458cf9d58c 100644
--- a/compiler/src/dotty/tools/dotc/parsing/Tokens.scala
+++ b/compiler/src/dotty/tools/dotc/parsing/Tokens.scala
@@ -251,6 +251,12 @@ object Tokens extends TokensCommon {
final val canEndStatTokens: TokenSet = atomicExprTokens | BitSet(
TYPE, RPAREN, RBRACE, RBRACKET)
+ /** Tokens that stop a lookahead scan when searching for a `<-`, `then`, or `do`.
+ * Used for disambiguating between old and new syntax.
+ */
+ final val stopScanTokens: BitSet = mustStartStatTokens |
+ BitSet(IF, ELSE, WHILE, DO, FOR, YIELD, NEW, TRY, CATCH, FINALLY, THROW, RETURN, MATCH, SEMI, EOF)
+
final val numericLitTokens: TokenSet = BitSet(INTLIT, LONGLIT, FLOATLIT, DOUBLELIT)
final val scala3keywords = BitSet(ENUM, ERASED, GIVEN, IMPLIED)
diff --git a/compiler/src/dotty/tools/dotc/typer/TypeAssigner.scala b/compiler/src/dotty/tools/dotc/typer/TypeAssigner.scala
index b36a6a7097aa..b08f79904337 100644
--- a/compiler/src/dotty/tools/dotc/typer/TypeAssigner.scala
+++ b/compiler/src/dotty/tools/dotc/typer/TypeAssigner.scala
@@ -271,7 +271,13 @@ trait TypeAssigner {
|An extension method was tried, but could not be fully constructed:
|
| ${failure.tree.show.replace("\n", "\n ")}"""
- case _ => ""
+ case _ =>
+ if (tree.hasAttachment(desugar.MultiLineInfix))
+ i""".
+ |Note that `$name` is treated as an infix operator in Scala 3.
+ |If you do not want that, insert a `;` or empty line in front
+ |or drop any spaces behind the operator."""
+ else ""
}
errorType(NotAMember(qualType, name, kind, addendum), tree.sourcePos)
}
diff --git a/docs/docs/reference/changed-features/operators.md b/docs/docs/reference/changed-features/operators.md
index 0a618e0426a3..e8900740d76d 100644
--- a/docs/docs/reference/changed-features/operators.md
+++ b/docs/docs/reference/changed-features/operators.md
@@ -3,7 +3,8 @@ layout: doc-page
title: Rules for Operators
---
-There are two annotations that regulate operators: `infix` and `alpha`.
+The rules for infix operators have changed. There are two annotations that regulate operators: `infix` and `alpha`.
+Furthermore, a syntax change allows infix operators to be written at the start of a line in a multi-line expression.
## The @alpha Annotation
@@ -127,3 +128,53 @@ The purpose of the `@infix` annotation is to achieve consistency across a code b
5. To smooth migration to Scala 3.0, alphanumeric operations will only be deprecated from Scala 3.1 onwards,
or if the `-strict` option is given in Dotty/Scala 3.
+
+## Syntax Change
+
+Infix operators can now appear at the start of lines in a multi-line expression. Examples:
+```scala
+val str = "hello"
+ ++ " world"
+ ++ "!"
+
+def condition =
+ x > 0
+ || xs.exists(_ > 0)
+ || xs.isEmpty
+```
+Previously, these expressions would have been rejected, since the compiler's semicolon inference
+would have treated the continuations `++ " world"` or `|| xs.isEmpty` as separate statements.
+
+To make this syntax work, the rules are modified to not infer semicolons in front of leading infix operators.
+A _leading infix operator_ is
+ - a symbolic identifier such as `+` or `approx_==`, or an identifier in backticks,
+ - that starts a new line,
+ - that precedes a token on the same line that can start an expression,
+ - and that is immediately followed by at least one space character `' '`.
+
+Example:
+
+```scala
+ freezing
+ | boiling
+```
+This is recognized as a single infix operation. Compare with:
+```scala
+ freezing
+ !boiling
+```
+This is seen as two statements, `freezing` and `!boiling`. The difference is that only the operator in the first example
+is followed by a space.
+
+Another example:
+```scala
+ println("hello")
+ ???
+ ??? match { case 0 => 1 }
+```
+This code is recognized as three different statements. `???` is syntactically a symbolic identifier, but
+neither of its occurrences is followed by a space and a token that can start an expression.
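+
+If a line starting with an operator was meant as a separate statement but is parsed as a
+continuation, the resulting error can be avoided by inserting a `;` or an empty line in front
+of the operator, or by dropping the space after it. A short sketch, mirroring the
+`multiLineOps` tests added in this change:
+```scala
+val b1: Boolean = {
+  println("hello")
+  ! "hello".isEmpty  // error: `!` continues the previous line, but `println(...)` is Unit
+}
+
+val b2: Boolean = {
+  println("hello")
+  !"hello".isEmpty   // ok: no space after `!`, so this is a separate prefix expression
+}
+```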
+
+
+
+
diff --git a/docs/docs/reference/dropped-features/do-while.md b/docs/docs/reference/dropped-features/do-while.md
new file mode 100644
index 000000000000..ed2d07c95965
--- /dev/null
+++ b/docs/docs/reference/dropped-features/do-while.md
@@ -0,0 +1,49 @@
+---
+layout: doc-page
+title: Dropped: Do-While
+---
+
+The syntax construct
+```scala
+ do <body> while <cond>
+```
+is no longer supported. Instead, it is recommended to use the equivalent `while` loop
+below:
+```scala
+ while ({ <body> ; <cond> }) ()
+```
+For instance, instead of
+```scala
+ do
+ i += 1
+ while (f(i) == 0)
+```
+one writes
+```scala
+ while ({
+ i += 1
+ f(i) == 0
+ }) ()
+```
+Under the [new syntax rules](../other-new-features/control-syntax), this code can also be written without the awkward `({...})` bracketing, like this:
+```scala
+ while {
+ i += 1
+ f(i) == 0
+ } do ()
+```
+The idea of using a block as the condition of a `while` loop also provides a solution
+to the "loop-and-a-half" problem. For instance:
+```scala
+ while {
+ val x: Int = iterator.next
+ x >= 0
+ } do print(".")
+```
+
+### Why Drop The Construct?
+
+ - `do-while` is used relatively rarely and it can be expressed faithfully using just `while`. So there seems to be little point in having it as a separate syntax construct.
+ - Under the [new syntax rules](../other-new-features/control-syntax) `do` is used
+ as a statement continuation, which would clash with its meaning as a statement
+ introduction.
diff --git a/docs/docs/reference/other-new-features/control-syntax.md b/docs/docs/reference/other-new-features/control-syntax.md
new file mode 100644
index 000000000000..455474171eca
--- /dev/null
+++ b/docs/docs/reference/other-new-features/control-syntax.md
@@ -0,0 +1,38 @@
+---
+layout: doc-page
+title: New Control Syntax
+---
+
+Scala 3 has a new "quiet" syntax for control expressions that does not rely on
+enclosing the condition in parentheses, and also allows dropping parentheses or braces
+around the generators of a `for`-expression. Examples:
+```scala
+if x < 0 then -x else x
+
+while x >= 0 do x = f(x)
+
+for x <- xs if x > 0
+yield x * x
+
+for
+ x <- xs
+ y <- ys
+do
+ println(x + y)
+```
+
+The rules in detail are:
+
+ - The condition of an `if`-expression can be written without enclosing parentheses if it is followed by a `then`.
+ - The condition of a `while`-loop can be written without enclosing parentheses if it is followed by a `do`.
+ - The enumerators of a `for`-expression can be written without enclosing parentheses or braces if they are followed by a `yield` or `do`.
+ - A `do` in a `for`-expression expresses a `for`-loop.
+ - Newline characters are not statement separators in a condition of an `if` or a `while`.
+ So the meaning of newlines is the same no matter whether parentheses are present
or absent (see the example after this list).
+ - Newline characters are potential statement separators in the enumerators of a `for`-expression.
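+
+For example, because newlines are not statement separators inside a condition, a condition
+may span several lines without parentheses. A minimal sketch:
+```scala
+def checkEmpty(xs: List[Int]): Unit =
+  if xs == Nil
+     && xs.length == 0
+  then
+    println("yes")
+```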
+
+### Rewrites
+
+The Dotty compiler can rewrite source code between the old and the new syntax.
+When invoked with the options `-rewrite -new-syntax` it rewrites from old to new syntax, dropping parentheses and braces in conditions and enumerators. When invoked with the options `-rewrite -old-syntax` it rewrites in the reverse direction, inserting parentheses and braces as needed.
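+
+For instance, invoking the compiler with `dotc -rewrite -new-syntax Test.scala` (the file name
+is just an example) patches conditions and enumerators in place. A sketch of the effect on a
+small method:
+```scala
+// before rewriting (old syntax):
+//   if (xs == Nil) println("empty")
+//   for (x <- xs) println(x)
+
+// after `-rewrite -new-syntax`, the same lines read:
+def f(xs: List[Int]): Unit = {
+  if xs == Nil then println("empty")
+  for x <- xs do println(x)
+}
+```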
diff --git a/docs/sidebar.yml b/docs/sidebar.yml
index 9da7fa35f921..bc007227b073 100644
--- a/docs/sidebar.yml
+++ b/docs/sidebar.yml
@@ -101,6 +101,8 @@ sidebar:
url: docs/reference/other-new-features/tupled-function.html
- title: threadUnsafe Annotation
url: docs/reference/other-new-features/threadUnsafe-annotation.html
+ - title: New Control Syntax
+ url: docs/reference/other-new-features/control-syntax.html
- title: Other Changed Features
subsection:
- title: Structural Types
@@ -143,6 +145,8 @@ sidebar:
url: docs/reference/dropped-features/existential-types.html
- title: Type Projection
url: docs/reference/dropped-features/type-projection.html
+ - title: Do-While
+ url: docs/reference/dropped-features/do-while.html
- title: Procedure Syntax
url: docs/reference/dropped-features/procedure-syntax.html
- title: Package Objects
diff --git a/tests/neg/multiLineOps.scala b/tests/neg/multiLineOps.scala
new file mode 100644
index 000000000000..4f48c18af3ce
--- /dev/null
+++ b/tests/neg/multiLineOps.scala
@@ -0,0 +1,15 @@
+val x = 1
+ + 2
+ +3 // error: Expected a toplevel definition
+
+val b1 = {
+ 22
+ * 22 // ok
+ */*one more*/22 // error: end of statement expected
+} // error: ';' expected, but '}' found
+
+val b2: Boolean = {
+ println(x)
+ ! "hello".isEmpty // error: value ! is not a member of Unit
+}
+
diff --git a/tests/pos/multiLineOps.scala b/tests/pos/multiLineOps.scala
new file mode 100644
index 000000000000..84af353f6f6b
--- /dev/null
+++ b/tests/pos/multiLineOps.scala
@@ -0,0 +1,26 @@
+val x = 1
+ + 2
+ + 3
+
+class Channel {
+ def ! (msg: String): Channel = this
+ def send_! (msg: String): Channel = this
+}
+
+val c = Channel()
+
+def send() =
+ c ! "hello"
+ ! "world"
+ send_! "!"
+
+val b: Boolean =
+ "hello".isEmpty
+ && true &&
+ !"hello".isEmpty
+
+val b2: Boolean = {
+ println(x)
+ !"hello".isEmpty
+ ???
+}
diff --git a/tests/pos/syntaxHeals.scala b/tests/pos/syntaxHeals.scala
new file mode 100644
index 000000000000..3694b9d04894
--- /dev/null
+++ b/tests/pos/syntaxHeals.scala
@@ -0,0 +1,24 @@
+// Compile with -rewrite -new-syntax
+// Then compile again with -rewrite -old-syntax
+// The resulting file is the same as the original one, except for better formatting
+object Test {
+
+ val xs = List(1, 2, 3)
+
+ for(x <- xs)yield x * 2
+
+ for(x <- xs)
+ yield x * 2
+
+ for{ x <- xs; y <- xs }yield x * y
+
+ for{
+ x <- xs
+ y <- xs
+ }yield x * y
+
+ if(xs == Nil)println("yes")
+
+ if(xs == Nil)
+ println("yes")
+}
\ No newline at end of file
diff --git a/tests/pos/syntaxRewrites.scala b/tests/pos/syntaxRewrites.scala
new file mode 100644
index 000000000000..05287faff616
--- /dev/null
+++ b/tests/pos/syntaxRewrites.scala
@@ -0,0 +1,73 @@
+// Compile with -rewrite -new-syntax
+// Then compile again with -rewrite -old-syntax
+// The resulting file is the same as the original one, except for some extra spaces
+// at line ends
+object Test {
+
+ val xs = List(1, 2, 3)
+
+ for (x <- xs) yield x * 2
+
+ for (x <- xs)
+ yield x * 2
+
+ for { x <- xs; y <- xs } yield x * y
+
+ for {
+ x <- xs
+ y <- xs
+ } yield x * y
+
+ for {
+ x <- xs
+ y <- xs
+ } yield x * y
+
+ for { x <- xs }
+ yield x * 2
+
+// -----------------------------------------------
+
+ for (x <- xs) println(x)
+
+ for (x <- xs)
+ println(x)
+
+ for { x <- xs; y <- xs } println(x * y)
+
+ for {
+ x <- xs
+ y <- xs
+ }
+ println(x * y)
+
+ for {
+ x <- xs
+ y <- xs
+ } println(x * y)
+
+ for { x <- xs }
+ println(x)
+
+ if (xs == Nil) println("yes")
+
+ if (xs == Nil)
+ println("yes")
+
+ if (xs == Nil
+ && xs.length == 0)
+ println("yes")
+
+ while (xs == Nil) println("yes")
+
+ while ({
+ val ys = xs ++ xs
+ ys.nonEmpty
+ }) println("yes")
+
+ while ({
+ val ys = xs ++ xs
+ ys.nonEmpty
+ })
+ println("yes")
+}
\ No newline at end of file