diff --git a/compiler/src/dotty/tools/dotc/CompilationUnit.scala b/compiler/src/dotty/tools/dotc/CompilationUnit.scala index 63458e8fb578..c3d9326f180c 100644 --- a/compiler/src/dotty/tools/dotc/CompilationUnit.scala +++ b/compiler/src/dotty/tools/dotc/CompilationUnit.scala @@ -21,6 +21,11 @@ class CompilationUnit(val source: SourceFile) { /** Pickled TASTY binaries, indexed by class. */ var pickled: Map[ClassSymbol, Array[Byte]] = Map() + + /** Will be reset to `true` if `untpdTree` contains `Quote` trees. The information + * is used in phase ReifyQuotes in order to avoid traversing a quote-less tree. + */ + var containsQuotesOrSplices: Boolean = false } object CompilationUnit { diff --git a/compiler/src/dotty/tools/dotc/Compiler.scala b/compiler/src/dotty/tools/dotc/Compiler.scala index e318332826f6..914272f5249c 100644 --- a/compiler/src/dotty/tools/dotc/Compiler.scala +++ b/compiler/src/dotty/tools/dotc/Compiler.scala @@ -47,9 +47,10 @@ class Compiler { List(new sbt.ExtractAPI), // Sends a representation of the API of classes to sbt via callbacks List(new Pickler), // Generate TASTY info List(new LinkAll), // Reload compilation units from TASTY for library code (if needed) + List(new ReifyQuotes), // Turn quoted trees into explicit run-time data structures List(new FirstTransform, // Some transformations to put trees into a canonical form new CheckReentrant, // Internal use only: Check that compiled program has no data races involving global vars - new ElimJavaPackages), // Eliminate syntactic references to Java packages + new ElimPackagePrefixes), // Eliminate references to package prefixes in Select nodes List(new CheckStatic, // Check restrictions that apply to @static members new ElimRepeated, // Rewrite vararg parameters and arguments new NormalizeFlags, // Rewrite some definition flags @@ -59,8 +60,8 @@ class Compiler { new ByNameClosures, // Expand arguments to by-name parameters to closures new LiftTry, // Put try expressions that might execute on 
non-empty stacks into their own methods new HoistSuperArgs, // Hoist complex arguments of supercalls to enclosing scope - new ClassOf, // Expand `Predef.classOf` calls. - new RefChecks), // Various checks mostly related to abstract members and overriding + new ClassOf, // Expand `Predef.classOf` calls. + new RefChecks), // Various checks mostly related to abstract members and overriding List(new TryCatchPatterns, // Compile cases in try/catch new PatternMatcher, // Compile pattern matches new ExplicitOuter, // Add accessors to outer classes from nested ones. @@ -68,7 +69,7 @@ class Compiler { new ShortcutImplicits, // Allow implicit functions without creating closures new CrossCastAnd, // Normalize selections involving intersection types. new Splitter), // Expand selections involving union types into conditionals - List(new PhantomArgLift, // Extracts the evaluation of phantom arguments placing them before the call. + List(new PhantomArgLift, // Extracts the evaluation of phantom arguments placing them before the call. 
new VCInlineMethods, // Inlines calls to value class methods new SeqLiterals, // Express vararg arguments as arrays new InterceptedMethods, // Special handling of `==`, `|=`, `getClass` methods diff --git a/compiler/src/dotty/tools/dotc/ast/Desugar.scala b/compiler/src/dotty/tools/dotc/ast/Desugar.scala index e9d02525546e..4fea10f68c8a 100644 --- a/compiler/src/dotty/tools/dotc/ast/Desugar.scala +++ b/compiler/src/dotty/tools/dotc/ast/Desugar.scala @@ -1069,7 +1069,8 @@ object desugar { Select(t, op.name) } case PrefixOp(op, t) => - Select(t, nme.UNARY_PREFIX ++ op.name) + val nspace = if (ctx.mode.is(Mode.Type)) tpnme else nme + Select(t, nspace.UNARY_PREFIX ++ op.name) case Tuple(ts) => val arity = ts.length def tupleTypeRef = defn.TupleType(arity) diff --git a/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala b/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala index da8759a5f5fc..2b2b167e2a85 100644 --- a/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala +++ b/compiler/src/dotty/tools/dotc/ast/TreeInfo.scala @@ -264,7 +264,7 @@ trait UntypedTreeInfo extends TreeInfo[Untyped] { self: Trees.Instance[Untyped] */ def lacksDefinition(mdef: MemberDef)(implicit ctx: Context) = mdef match { case mdef: ValOrDefDef => - mdef.unforcedRhs == EmptyTree && !mdef.name.isConstructorName && !mdef.mods.is(ParamAccessor) + mdef.unforcedRhs == EmptyTree && !mdef.name.isConstructorName && !mdef.mods.is(TermParamOrAccessor) case mdef: TypeDef => def isBounds(rhs: Tree): Boolean = rhs match { case _: TypeBoundsTree => true diff --git a/compiler/src/dotty/tools/dotc/ast/TreeTypeMap.scala b/compiler/src/dotty/tools/dotc/ast/TreeTypeMap.scala index 68bd65696e64..a0b580118157 100644 --- a/compiler/src/dotty/tools/dotc/ast/TreeTypeMap.scala +++ b/compiler/src/dotty/tools/dotc/ast/TreeTypeMap.scala @@ -31,7 +31,7 @@ import dotty.tools.dotc.transform.SymUtils._ * gets two different denotations in the same period. Hence, if -Yno-double-bindings is * set, we would get a data race assertion error. 
*/ -final class TreeTypeMap( +class TreeTypeMap( val typeMap: Type => Type = IdentityTypeMap, val treeMap: tpd.Tree => tpd.Tree = identity _, val oldOwners: List[Symbol] = Nil, @@ -154,7 +154,7 @@ final class TreeTypeMap( assert(!to.exists(substFrom contains _)) assert(!from.exists(newOwners contains _)) assert(!to.exists(oldOwners contains _)) - new TreeTypeMap( + newMap( typeMap, treeMap, from ++ oldOwners, @@ -163,6 +163,16 @@ final class TreeTypeMap( to ++ substTo) } + /** A new map of the same class this one */ + protected def newMap( + typeMap: Type => Type, + treeMap: Tree => Tree, + oldOwners: List[Symbol], + newOwners: List[Symbol], + substFrom: List[Symbol], + substTo: List[Symbol])(implicit ctx: Context) = + new TreeTypeMap(typeMap, treeMap, oldOwners, newOwners, substFrom, substTo) + /** Apply `typeMap` and `ownerMap` to given symbols `syms` * and return a treemap that contains the substitution * between original and mapped symbols. diff --git a/compiler/src/dotty/tools/dotc/ast/tpd.scala b/compiler/src/dotty/tools/dotc/ast/tpd.scala index b27c16d0eb00..eecac47fe704 100644 --- a/compiler/src/dotty/tools/dotc/ast/tpd.scala +++ b/compiler/src/dotty/tools/dotc/ast/tpd.scala @@ -104,6 +104,12 @@ object tpd extends Trees.Instance[Type] with TypedTreeInfo { Closure(Nil, call, targetTpt)) } + /** A closure whole anonymous function has the given method type */ + def Lambda(tpe: MethodType, rhsFn: List[Tree] => Tree)(implicit ctx: Context): Block = { + val meth = ctx.newSymbol(ctx.owner, nme.ANON_FUN, Synthetic | Method, tpe) + Closure(meth, tss => rhsFn(tss.head).changeOwner(ctx.owner, meth)) + } + def CaseDef(pat: Tree, guard: Tree, body: Tree)(implicit ctx: Context): CaseDef = ta.assignType(untpd.CaseDef(pat, guard, body), body) diff --git a/compiler/src/dotty/tools/dotc/ast/untpd.scala b/compiler/src/dotty/tools/dotc/ast/untpd.scala index e4b34d59c189..cd6952ed16ab 100644 --- a/compiler/src/dotty/tools/dotc/ast/untpd.scala +++ 
b/compiler/src/dotty/tools/dotc/ast/untpd.scala @@ -69,7 +69,10 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { case class InfixOp(left: Tree, op: Ident, right: Tree) extends OpTree case class PostfixOp(od: Tree, op: Ident) extends OpTree - case class PrefixOp(op: Ident, od: Tree) extends OpTree + case class PrefixOp(op: Ident, od: Tree) extends OpTree { + override def isType = op.isType + override def isTerm = op.isTerm + } case class Parens(t: Tree) extends ProxyTree { def forwardTo = t } @@ -78,6 +81,7 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { override def isType = !isTerm } case class Throw(expr: Tree) extends TermTree + case class Quote(expr: Tree) extends TermTree case class WhileDo(cond: Tree, body: Tree) extends TermTree case class DoWhile(body: Tree, cond: Tree) extends TermTree case class ForYield(enums: List[Tree], expr: Tree) extends TermTree @@ -449,6 +453,10 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { case tree: Throw if expr eq tree.expr => tree case _ => finalize(tree, untpd.Throw(expr)) } + def Quote(tree: Tree)(expr: Tree) = tree match { + case tree: Quote if expr eq tree.expr => tree + case _ => finalize(tree, untpd.Quote(expr)) + } def WhileDo(tree: Tree)(cond: Tree, body: Tree) = tree match { case tree: WhileDo if (cond eq tree.cond) && (body eq tree.body) => tree case _ => finalize(tree, untpd.WhileDo(cond, body)) @@ -507,6 +515,8 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { cpy.Tuple(tree)(transform(trees)) case Throw(expr) => cpy.Throw(tree)(transform(expr)) + case Quote(expr) => + cpy.Quote(tree)(transform(expr)) case WhileDo(cond, body) => cpy.WhileDo(tree)(transform(cond), transform(body)) case DoWhile(body, cond) => @@ -554,6 +564,8 @@ object untpd extends Trees.Instance[Untyped] with UntypedTreeInfo { this(x, trees) case Throw(expr) => this(x, expr) + case Quote(expr) => + this(x, expr) case WhileDo(cond, body) => this(this(x, cond), 
body) case DoWhile(body, cond) => diff --git a/compiler/src/dotty/tools/dotc/config/Printers.scala b/compiler/src/dotty/tools/dotc/config/Printers.scala index b0936864c426..d914d4b6d885 100644 --- a/compiler/src/dotty/tools/dotc/config/Printers.scala +++ b/compiler/src/dotty/tools/dotc/config/Printers.scala @@ -11,26 +11,27 @@ object Printers { } val default: Printer = new Printer - val dottydoc: Printer = noPrinter - val core: Printer = noPrinter - val typr: Printer = noPrinter + val constr: Printer = noPrinter + val core: Printer = noPrinter val checks: Printer = noPrinter - val overload: Printer = noPrinter - val implicits: Printer = noPrinter - val implicitsDetailed: Printer = noPrinter - val subtyping: Printer = noPrinter - val unapp: Printer = noPrinter - val gadts: Printer = noPrinter - val hk: Printer = noPrinter - val variances: Printer = noPrinter - val incremental: Printer = noPrinter val config: Printer = noPrinter - val transforms: Printer = noPrinter val cyclicErrors: Printer = noPrinter - val pickling: Printer = noPrinter - val inlining: Printer = noPrinter + val dottydoc: Printer = noPrinter val exhaustivity: Printer = noPrinter + val incremental: Printer = noPrinter + val gadts: Printer = noPrinter + val hk: Printer = noPrinter + val implicits: Printer = noPrinter + val implicitsDetailed: Printer = noPrinter + val inlining: Printer = noPrinter + val overload: Printer = noPrinter val patmatch: Printer = noPrinter + val pickling: Printer = noPrinter val simplify: Printer = noPrinter + val subtyping: Printer = noPrinter + val transforms: Printer = noPrinter + val typr: Printer = noPrinter + val unapp: Printer = noPrinter + val variances: Printer = noPrinter } diff --git a/compiler/src/dotty/tools/dotc/core/Contexts.scala b/compiler/src/dotty/tools/dotc/core/Contexts.scala index fe5f5b6d4c62..629794efb74a 100644 --- a/compiler/src/dotty/tools/dotc/core/Contexts.scala +++ b/compiler/src/dotty/tools/dotc/core/Contexts.scala @@ -359,6 +359,15 @@ object 
Contexts { else if (untpd.isSuperConstrCall(stat) && this.owner.isClass) superCallContext else ctx.fresh.setOwner(exprOwner) + /** A new context that summarizes an import statement */ + def importContext(imp: Import[_], sym: Symbol) = { + val impNameOpt = imp.expr match { + case ref: RefTree[_] => Some(ref.name.asTermName) + case _ => None + } + ctx.fresh.setImportInfo(new ImportInfo(implicit ctx => sym, imp.selectors, impNameOpt)) + } + /** The current source file; will be derived from current * compilation unit. */ diff --git a/compiler/src/dotty/tools/dotc/core/Decorators.scala b/compiler/src/dotty/tools/dotc/core/Decorators.scala index 094d4092ac1a..ec8563bf3f8b 100644 --- a/compiler/src/dotty/tools/dotc/core/Decorators.scala +++ b/compiler/src/dotty/tools/dotc/core/Decorators.scala @@ -175,6 +175,10 @@ object Decorators { recur(enclosingInlineds, pos) } + implicit class reportingDeco[T](val x: T) extends AnyVal { + def reporting(op: T => String): T = { println(op(x)); x } + } + implicit class StringInterpolators(val sc: StringContext) extends AnyVal { /** General purpose string formatting */ diff --git a/compiler/src/dotty/tools/dotc/core/Definitions.scala b/compiler/src/dotty/tools/dotc/core/Definitions.scala index e3bcd1e1fda0..19dab5bac91a 100644 --- a/compiler/src/dotty/tools/dotc/core/Definitions.scala +++ b/compiler/src/dotty/tools/dotc/core/Definitions.scala @@ -5,7 +5,6 @@ package core import Types._, Contexts._, Symbols._, Denotations._, SymDenotations._, StdNames._, Names._ import Flags._, Scopes._, Decorators._, NameOps._, util.Positions._, Periods._ import unpickleScala2.Scala2Unpickler.ensureConstructor -import scala.annotation.{ switch, meta } import scala.collection.{ mutable, immutable } import PartialFunction._ import collection.mutable @@ -146,11 +145,20 @@ class Definitions { } private def enterPolyMethod(cls: ClassSymbol, name: TermName, typeParamCount: Int, - resultTypeFn: PolyType => Type, flags: FlagSet = EmptyFlags) = { + resultTypeFn: 
PolyType => Type, flags: FlagSet = EmptyFlags, + useCompleter: Boolean = false) = { val tparamNames = PolyType.syntheticParamNames(typeParamCount) val tparamInfos = tparamNames map (_ => TypeBounds.empty) - val ptype = PolyType(tparamNames)(_ => tparamInfos, resultTypeFn) - enterMethod(cls, name, ptype, flags) + def ptype = PolyType(tparamNames)(_ => tparamInfos, resultTypeFn) + val info = + if (useCompleter) + new LazyType { + def complete(denot: SymDenotation)(implicit ctx: Context): Unit = { + denot.info = ptype + } + } + else ptype + enterMethod(cls, name, info, flags) } private def enterT1ParameterlessMethod(cls: ClassSymbol, name: TermName, resultTypeFn: PolyType => Type, flags: FlagSet) = @@ -290,14 +298,23 @@ class Definitions { /** Marker method to indicate an argument to a call-by-name parameter. * Created by byNameClosures and elimByName, eliminated by Erasure, */ - lazy val cbnArg = enterPolyMethod( - OpsPackageClass, nme.cbnArg, 1, + lazy val cbnArg = enterPolyMethod(OpsPackageClass, nme.cbnArg, 1, pt => MethodType(List(FunctionOf(Nil, pt.paramRefs(0))), pt.paramRefs(0))) /** Method representing a throw */ lazy val throwMethod = enterMethod(OpsPackageClass, nme.THROWkw, MethodType(List(ThrowableType), NothingType)) + /** Method representing a term quote */ + lazy val quoteMethod = enterPolyMethod(OpsPackageClass, nme.QUOTE, 1, + pt => MethodType(pt.paramRefs(0) :: Nil, QuotedExprType.appliedTo(pt.paramRefs(0) :: Nil)), + useCompleter = true) + + /** Method representing a type quote */ + lazy val typeQuoteMethod = enterPolyMethod(OpsPackageClass, nme.TYPE_QUOTE, 1, + pt => QuotedTypeType.appliedTo(pt.paramRefs(0) :: Nil), + useCompleter = true) + lazy val NothingClass: ClassSymbol = enterCompleteClassSymbol( ScalaPackageClass, tpnme.Nothing, AbstractFinal, List(AnyClass.typeRef)) def NothingType = NothingClass.typeRef @@ -585,6 +602,21 @@ class Definitions { def ClassTagClass(implicit ctx: Context) = ClassTagType.symbol.asClass def 
ClassTagModule(implicit ctx: Context) = ClassTagClass.companionModule + lazy val QuotedExprType = ctx.requiredClassRef("scala.quoted.Expr") + def QuotedExprClass(implicit ctx: Context) = QuotedExprType.symbol.asClass + + def QuotedExpr_~(implicit ctx: Context) = QuotedExprClass.requiredMethod(nme.UNARY_~) + def QuotedExpr_run(implicit ctx: Context) = QuotedExprClass.requiredMethod(nme.run) + + lazy val QuotedTypeType = ctx.requiredClassRef("scala.quoted.Type") + def QuotedTypeClass(implicit ctx: Context) = QuotedTypeType.symbol.asClass + + def QuotedType_~(implicit ctx: Context) = + QuotedTypeClass.info.member(tpnme.UNARY_~).symbol.asType + + def Unpickler_unpickleExpr = ctx.requiredMethod("scala.runtime.quoted.Unpickler.unpickleExpr") + def Unpickler_unpickleType = ctx.requiredMethod("scala.runtime.quoted.Unpickler.unpickleType") + lazy val EqType = ctx.requiredClassRef("scala.Eq") def EqClass(implicit ctx: Context) = EqType.symbol.asClass def EqModule(implicit ctx: Context) = EqClass.companionModule @@ -1071,7 +1103,8 @@ class Definitions { OpsPackageClass) /** Lists core methods that don't have underlying bytecode, but are synthesized on-the-fly in every reflection universe */ - lazy val syntheticCoreMethods = AnyMethods ++ ObjectMethods ++ List(String_+, throwMethod) + lazy val syntheticCoreMethods = + AnyMethods ++ ObjectMethods ++ List(String_+, throwMethod, quoteMethod, typeQuoteMethod) lazy val reservedScalaClassNames: Set[Name] = syntheticScalaClasses.map(_.name).toSet @@ -1081,13 +1114,13 @@ class Definitions { def init()(implicit ctx: Context) = { this.ctx = ctx if (!_isInitialized) { - // force initialization of every symbol that is synthesized or hijacked by the compiler - val forced = syntheticCoreClasses ++ syntheticCoreMethods ++ ScalaValueClasses() - // Enter all symbols from the scalaShadowing package in the scala package for (m <- ScalaShadowingPackageClass.info.decls) ScalaPackageClass.enter(m) + // force initialization of every symbol that is 
synthesized or hijacked by the compiler + val forced = syntheticCoreClasses ++ syntheticCoreMethods ++ ScalaValueClasses() + _isInitialized = true } } diff --git a/compiler/src/dotty/tools/dotc/core/Flags.scala b/compiler/src/dotty/tools/dotc/core/Flags.scala index 66f17a3c7f85..f1cd9db376a3 100644 --- a/compiler/src/dotty/tools/dotc/core/Flags.scala +++ b/compiler/src/dotty/tools/dotc/core/Flags.scala @@ -414,7 +414,7 @@ object Flags { /** A Scala 2.12 or higher trait */ final val Scala_2_12_Trait = typeFlag(58, "") - /** A macro (Scala 2.x only) */ + /** A macro */ final val Macro = commonFlag(59, "") /** A method that is known to have inherited default parameters */ @@ -575,7 +575,7 @@ object Flags { final val SyntheticOrPrivate = Synthetic | Private /** A deferred member or a parameter accessor (these don't have right hand sides) */ - final val DeferredOrParamAccessor = Deferred | ParamAccessor + final val DeferredOrParamOrAccessor = Deferred | Param | ParamAccessor /** value that's final or inline */ final val FinalOrInline = Final | Inline @@ -598,6 +598,9 @@ object Flags { /** Is a default parameter in Scala 2*/ final val DefaultParameter = allOf(Param, DefaultParameterized) + /** A Scala 2 Macro */ + final val Scala2Macro = allOf(Macro, Scala2x) + /** A trait that does not need to be initialized */ final val NoInitsTrait = allOf(Trait, NoInits) diff --git a/compiler/src/dotty/tools/dotc/core/Phases.scala b/compiler/src/dotty/tools/dotc/core/Phases.scala index 61732763addc..0abc540448a0 100644 --- a/compiler/src/dotty/tools/dotc/core/Phases.scala +++ b/compiler/src/dotty/tools/dotc/core/Phases.scala @@ -276,6 +276,10 @@ object Phases { * and type applications. 
*/ def relaxedTyping: Boolean = false + + /** If set, implicit search is enabled */ + def allowsImplicitSearch: Boolean = false + /** List of names of phases that should precede this phase */ def runsAfter: Set[Class[_ <: Phase]] = Set.empty diff --git a/compiler/src/dotty/tools/dotc/core/StdNames.scala b/compiler/src/dotty/tools/dotc/core/StdNames.scala index be886b2b3bd7..aab111a3fdf8 100644 --- a/compiler/src/dotty/tools/dotc/core/StdNames.scala +++ b/compiler/src/dotty/tools/dotc/core/StdNames.scala @@ -143,6 +143,8 @@ object StdNames { val INITIALIZER_PREFIX: N = "initial$" val COMPANION_MODULE_METHOD: N = "companion$module" val COMPANION_CLASS_METHOD: N = "companion$class" + val QUOTE: N = "'" + val TYPE_QUOTE: N = "type_'" val TRAIT_SETTER_SEPARATOR: N = str.TRAIT_SETTER_SEPARATOR // value types (and AnyRef) are all used as terms as well @@ -484,6 +486,7 @@ object StdNames { val reflect : N = "reflect" val reify : N = "reify" val rootMirror : N = "rootMirror" + val run: N = "run" val runOrElse: N = "runOrElse" val runtime: N = "runtime" val runtimeClass: N = "runtimeClass" diff --git a/compiler/src/dotty/tools/dotc/core/Symbols.scala b/compiler/src/dotty/tools/dotc/core/Symbols.scala index 4b8730ead2f5..090dbaed0bf3 100644 --- a/compiler/src/dotty/tools/dotc/core/Symbols.scala +++ b/compiler/src/dotty/tools/dotc/core/Symbols.scala @@ -383,6 +383,9 @@ trait Symbols { this: Context => base.staticRef(path.toTermName).requiredSymbol(_ is Module).asTerm def requiredModuleRef(path: PreName): TermRef = requiredModule(path).termRef + + def requiredMethod(path: PreName): TermSymbol = + base.staticRef(path.toTermName).requiredSymbol(_ is Method).asTerm } object Symbols { diff --git a/compiler/src/dotty/tools/dotc/core/tasty/DottyUnpickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/DottyUnpickler.scala index 8178019aa033..a4847e644765 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/DottyUnpickler.scala +++ 
b/compiler/src/dotty/tools/dotc/core/tasty/DottyUnpickler.scala @@ -20,7 +20,7 @@ object DottyUnpickler { class TreeSectionUnpickler(posUnpickler: Option[PositionUnpickler]) extends SectionUnpickler[TreeUnpickler]("ASTs") { def unpickle(reader: TastyReader, nameAtRef: NameTable) = - new TreeUnpickler(reader, nameAtRef, posUnpickler) + new TreeUnpickler(reader, nameAtRef, posUnpickler, Seq.empty) } class PositionsSectionUnpickler extends SectionUnpickler[PositionUnpickler]("Positions") { diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TastyFormat.scala b/compiler/src/dotty/tools/dotc/core/tasty/TastyFormat.scala index 82824495513d..a3ae450313ec 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TastyFormat.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TastyFormat.scala @@ -109,6 +109,7 @@ Standard-Section: "ASTs" TopLevelStat* BYNAMEtpt underlying_Term EMPTYTREE SHARED term_ASTRef + HOLE Length idx_Nat arg_Tree* Application = APPLY Length fn_Term arg_Term* TYPEAPPLY Length fn_Term arg_Type* @@ -180,7 +181,8 @@ Standard-Section: "ASTs" TopLevelStat* IMPLICIT LAZY OVERRIDE - INLINE // macro + INLINE // inline method + MACRO // inline method containing toplevel splices STATIC // mapped to static Java member OBJECT // an object or its class TRAIT // a trait @@ -225,7 +227,7 @@ object TastyFormat { final val header = Array(0x5C, 0xA1, 0xAB, 0x1F) val MajorVersion = 2 - val MinorVersion = 0 + val MinorVersion = 1 /** Tags used to serialize names */ class NameTags { @@ -296,6 +298,7 @@ object TastyFormat { final val SCALA2X = 29 final val DEFAULTparameterized = 30 final val STABLE = 31 + final val MACRO = 32 // Cat. 
2: tag Nat @@ -396,6 +399,7 @@ object TastyFormat { final val ANNOTATION = 173 final val TERMREFin = 174 final val TYPEREFin = 175 + final val HOLE = 255 final val firstSimpleTreeTag = UNITconst final val firstNatTreeTag = SHARED @@ -417,6 +421,7 @@ object TastyFormat { | LAZY | OVERRIDE | INLINE + | MACRO | STATIC | OBJECT | TRAIT @@ -470,6 +475,7 @@ object TastyFormat { case LAZY => "LAZY" case OVERRIDE => "OVERRIDE" case INLINE => "INLINE" + case MACRO => "MACRO" case STATIC => "STATIC" case OBJECT => "OBJECT" case TRAIT => "TRAIT" @@ -555,6 +561,7 @@ object TastyFormat { case SUPERtype => "SUPERtype" case TERMREFin => "TERMREFin" case TYPEREFin => "TYPEREFin" + case REFINEDtype => "REFINEDtype" case REFINEDtpt => "REFINEDtpt" case APPLIEDtype => "APPLIEDtype" @@ -576,6 +583,7 @@ object TastyFormat { case ANNOTATION => "ANNOTATION" case PRIVATEqualified => "PRIVATEqualified" case PROTECTEDqualified => "PROTECTEDqualified" + case HOLE => "HOLE" } /** @return If non-negative, the number of leading references (represented as nats) of a length/trees entry. 
@@ -583,7 +591,7 @@ object TastyFormat { */ def numRefs(tag: Int) = tag match { case VALDEF | DEFDEF | TYPEDEF | TYPEPARAM | PARAM | NAMEDARG | RETURN | BIND | - SELFDEF | REFINEDtype | TERMREFin | TYPEREFin => 1 + SELFDEF | REFINEDtype | TERMREFin | TYPEREFin | HOLE => 1 case RENAMED | PARAMtype => 2 case POLYtype | METHODtype | TYPELAMBDAtype => -1 case _ => 0 diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala index 37e82f8fdf37..87c3cbb554b7 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TreePickler.scala @@ -4,7 +4,7 @@ package core package tasty import ast.Trees._ -import ast.untpd +import ast.{untpd, tpd} import TastyFormat._ import Contexts._, Symbols._, Types._, Names._, Constants._, Decorators._, Annotations._, StdNames.tpnme, NameOps._ import collection.mutable @@ -14,14 +14,25 @@ import StdNames.nme import TastyBuffer._ import TypeApplications._ import transform.SymUtils._ +import printing.Printer +import printing.Texts._ import config.Config +object TreePickler { + + case class Hole(idx: Int, args: List[tpd.Tree]) extends tpd.TermTree { + override def fallbackToText(printer: Printer): Text = + s"[[$idx|" ~~ printer.toTextGlobal(args, ", ") ~~ "]]" + } +} + class TreePickler(pickler: TastyPickler) { val buf = new TreeBuffer pickler.newSection("ASTs", buf) + import TreePickler._ import buf._ import pickler.nameBuffer.nameIndex - import ast.tpd._ + import tpd._ private val symRefs = Symbols.newMutableSymbolMap[Addr] private val forwardSymRefs = Symbols.newMutableSymbolMap[List[Addr]] @@ -329,10 +340,10 @@ class TreePickler(pickler: TastyPickler) { case tp: TermRef if name != nme.WILDCARD => // wildcards are pattern bound, need to be preserved as ids. 
pickleType(tp) - case _ => + case tp => writeByte(if (tree.isType) IDENTtpt else IDENT) pickleName(name) - pickleType(tree.tpe) + pickleType(tp) } case This(qual) => if (qual.isEmpty) pickleType(tree.tpe) @@ -483,7 +494,7 @@ class TreePickler(pickler: TastyPickler) { else { if (!tree.self.isEmpty) registerTreeAddr(tree.self) pickleType { - cinfo.selfInfo match { + selfInfo match { case sym: Symbol => sym.info case tp: Type => tp } @@ -542,6 +553,12 @@ class TreePickler(pickler: TastyPickler) { case TypeBoundsTree(lo, hi) => writeByte(TYPEBOUNDStpt) withLength { pickleTree(lo); pickleTree(hi) } + case Hole(idx, args) => + writeByte(HOLE) + withLength { + writeNat(idx) + args.foreach(pickleTree) + } } catch { case ex: AssertionError => @@ -570,6 +587,7 @@ class TreePickler(pickler: TastyPickler) { if (flags is Case) writeByte(CASE) if (flags is Override) writeByte(OVERRIDE) if (flags is Inline) writeByte(INLINE) + if (flags is Macro) writeByte(MACRO) if (flags is JavaStatic) writeByte(STATIC) if (flags is Module) writeByte(OBJECT) if (flags is Local) writeByte(LOCAL) diff --git a/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala b/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala index c1a06769a7c4..265437adf4a6 100644 --- a/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala +++ b/compiler/src/dotty/tools/dotc/core/tasty/TreeUnpickler.scala @@ -8,7 +8,7 @@ import StdNames._, Denotations._, Flags._, Constants._, Annotations._ import NameKinds._ import typer.Checking.checkNonCyclic import util.Positions._ -import ast.{tpd, Trees, untpd} +import ast.{tpd, untpd, Trees} import Trees._ import Decorators._ import transform.SymUtils._ @@ -25,7 +25,10 @@ import config.Config * @param tastyName the nametable * @param posUNpicklerOpt the unpickler for positions, if it exists */ -class TreeUnpickler(reader: TastyReader, nameAtRef: NameRef => TermName, posUnpicklerOpt: Option[PositionUnpickler]) { +class TreeUnpickler(reader: TastyReader, + 
nameAtRef: NameRef => TermName, + posUnpicklerOpt: Option[PositionUnpickler], + splices: Seq[Any]) { import TastyFormat._ import TreeUnpickler._ import tpd._ @@ -383,10 +386,8 @@ class TreeUnpickler(reader: TastyReader, nameAtRef: NameRef => TermName, posUnpi private def noRhs(end: Addr): Boolean = currentAddr == end || isModifierTag(nextByte) - private def localContext(owner: Symbol)(implicit ctx: Context) = { - val lctx = ctx.fresh.setOwner(owner) - if (owner.isClass) lctx.setScope(owner.unforcedDecls) else lctx.setNewScope - } + private def localContext(owner: Symbol)(implicit ctx: Context) = + ctx.fresh.setOwner(owner) private def normalizeFlags(tag: Int, givenFlags: FlagSet, name: Name, isAbsType: Boolean, rhsIsEmpty: Boolean)(implicit ctx: Context): FlagSet = { val lacksDefinition = @@ -475,7 +476,10 @@ class TreeUnpickler(reader: TastyReader, nameAtRef: NameRef => TermName, posUnpi ctx.newSymbol(ctx.owner, name, flags, completer, privateWithin, coord) } sym.annotations = annots - ctx.enter(sym) + ctx.owner match { + case cls: ClassSymbol => cls.enter(sym) + case _ => + } registerSym(start, sym) if (isClass) { sym.completer.withDecls(newScope) @@ -520,6 +524,7 @@ class TreeUnpickler(reader: TastyReader, nameAtRef: NameRef => TermName, posUnpi case LAZY => addFlag(Lazy) case OVERRIDE => addFlag(Override) case INLINE => addFlag(Inline) + case MACRO => addFlag(Macro) case STATIC => addFlag(JavaStatic) case OBJECT => addFlag(Module) case TRAIT => addFlag(Trait) @@ -639,19 +644,18 @@ class TreeUnpickler(reader: TastyReader, nameAtRef: NameRef => TermName, posUnpi } } + val localCtx = localContext(sym) + def readRhs(implicit ctx: Context) = if (noRhs(end)) EmptyTree else readLater(end, rdr => ctx => rdr.readTerm()(ctx)) - def localCtx = localContext(sym) - def ValDef(tpt: Tree) = ta.assignType(untpd.ValDef(sym.name.asTermName, tpt, readRhs(localCtx)), sym) def DefDef(tparams: List[TypeDef], vparamss: List[List[ValDef]], tpt: Tree) = ta.assignType( - untpd.DefDef( - 
sym.name.asTermName, tparams, vparamss, tpt, readRhs(localCtx)), + untpd.DefDef(sym.name.asTermName, tparams, vparamss, tpt, readRhs(localCtx)), sym) def TypeDef(rhs: Tree) = @@ -665,7 +669,7 @@ class TreeUnpickler(reader: TastyReader, nameAtRef: NameRef => TermName, posUnpi case DEFDEF => val tparams = readParams[TypeDef](TYPEPARAM)(localCtx) val vparamss = readParamss(localCtx) - val tpt = readTpt() + val tpt = readTpt()(localCtx) val typeParams = tparams.map(_.symbol) val valueParamss = ctx.normalizeIfConstructor( vparamss.nestedMap(_.symbol), name == nme.CONSTRUCTOR) @@ -678,7 +682,7 @@ class TreeUnpickler(reader: TastyReader, nameAtRef: NameRef => TermName, posUnpi } DefDef(tparams, vparamss, tpt) case VALDEF => - val tpt = readTpt() + val tpt = readTpt()(localCtx) sym.info = tpt.tpe ValDef(tpt) case TYPEDEF | TYPEPARAM => @@ -697,7 +701,7 @@ class TreeUnpickler(reader: TastyReader, nameAtRef: NameRef => TermName, posUnpi } TypeDef(readTemplate(localCtx)) } else { - val rhs = readTpt() + val rhs = readTpt()(localCtx) sym.info = NoCompleter sym.info = rhs.tpe match { case _: TypeBounds | _: ClassInfo => checkNonCyclic(sym, rhs.tpe, reportErrors = false) @@ -706,7 +710,7 @@ class TreeUnpickler(reader: TastyReader, nameAtRef: NameRef => TermName, posUnpi TypeDef(rhs) } case PARAM => - val tpt = readTpt() + val tpt = readTpt()(localCtx) if (noRhs(end)) { sym.info = tpt.tpe ValDef(tpt) @@ -745,14 +749,15 @@ class TreeUnpickler(reader: TastyReader, nameAtRef: NameRef => TermName, posUnpi else NoType cls.info = new TempClassInfo(cls.owner.thisType, cls, cls.unforcedDecls, assumedSelfType) val localDummy = symbolAtCurrent() + val parentCtx = ctx.withOwner(localDummy) assert(readByte() == TEMPLATE) val end = readEnd() val tparams = readIndexedParams[TypeDef](TYPEPARAM) val vparams = readIndexedParams[ValDef](PARAM) val parents = collectWhile(nextByte != SELFDEF && nextByte != DEFDEF) { nextByte match { - case APPLY | TYPEAPPLY => readTerm() - case _ => readTpt() + case 
APPLY | TYPEAPPLY => readTerm()(parentCtx) + case _ => readTpt()(parentCtx) } } val parentTypes = parents.map(_.tpe.dealias) @@ -766,13 +771,14 @@ class TreeUnpickler(reader: TastyReader, nameAtRef: NameRef => TermName, posUnpi if (self.isEmpty) NoType else self.tpt.tpe) cls.setNoInitsFlags(fork.indexStats(end)) val constr = readIndexedDef().asInstanceOf[DefDef] + val mappedParents = parents.map(_.changeOwner(localDummy, constr.symbol)) val lazyStats = readLater(end, rdr => implicit ctx => { val stats = rdr.readIndexedStats(localDummy, end) tparams ++ vparams ++ stats }) setPos(start, - untpd.Template(constr, parents, self, lazyStats) + untpd.Template(constr, mappedParents, self, lazyStats) .withType(localDummy.termRef)) } @@ -922,17 +928,11 @@ class TreeUnpickler(reader: TastyReader, nameAtRef: NameRef => TermName, posUnpi def readLengthTerm(): Tree = { val end = readEnd() - def localNonClassCtx = { - val ctx1 = ctx.fresh.setNewScope - if (ctx.owner.isClass) ctx1.setOwner(ctx1.newLocalDummy(ctx.owner)) else ctx1 - } - def readBlock(mkTree: (List[Tree], Tree) => Tree): Tree = { val exprReader = fork skipTree() - val localCtx = localNonClassCtx - val stats = readStats(ctx.owner, end)(localCtx) - val expr = exprReader.readTerm()(localCtx) + val stats = readStats(ctx.owner, end) + val expr = exprReader.readTerm() mkTree(stats, expr) } @@ -1030,12 +1030,17 @@ class TreeUnpickler(reader: TastyReader, nameAtRef: NameRef => TermName, posUnpi case ANNOTATEDtpt => Annotated(readTpt(), readTerm()) case LAMBDAtpt => - val localCtx = localNonClassCtx - val tparams = readParams[TypeDef](TYPEPARAM)(localCtx) - val body = readTpt()(localCtx) + val tparams = readParams[TypeDef](TYPEPARAM) + val body = readTpt() LambdaTypeTree(tparams, body) case TYPEBOUNDStpt => TypeBoundsTree(readTpt(), readTpt()) + case HOLE => + val idx = readNat() + val args = until(end)(readTerm()) + val splice = splices(idx) + if (args.isEmpty) splice.asInstanceOf[Tree] + else splice.asInstanceOf[Seq[Any] => 
Tree](args) case _ => readPathTerm() } diff --git a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala index 171ccb641e14..9e75ade74971 100644 --- a/compiler/src/dotty/tools/dotc/parsing/Parsers.scala +++ b/compiler/src/dotty/tools/dotc/parsing/Parsers.scala @@ -857,6 +857,7 @@ object Parsers { /** SimpleType ::= SimpleType TypeArgs * | SimpleType `#' id * | StableId + * | [‘-’ | ‘+’ | ‘~’ | ‘!’] StableId * | Path `.' type * | `(' ArgTypes `)' * | `_' TypeBounds @@ -875,6 +876,8 @@ object Parsers { val start = in.skipToken() typeBounds().withPos(Position(start, in.lastOffset, start)) } + else if (isIdent && nme.raw.isUnary(in.name)) + atPos(in.offset) { PrefixOp(typeIdent(), path(thisOK = true)) } else path(thisOK = false, handleSingletonType) match { case r @ SingletonTypeTree(_) => r case r => convertToTypeId(r) @@ -1282,6 +1285,9 @@ object Parsers { /** SimpleExpr ::= new Template * | BlockExpr + * | ‘'{’ BlockExprContents ‘}’ + * | ‘'(’ ExprsInParens ‘)’ + * | ‘'[’ Type ‘]’ * | SimpleExpr1 [`_'] * SimpleExpr1 ::= literal * | xmlLiteral @@ -1310,6 +1316,15 @@ object Parsers { case LBRACE => canApply = false blockExpr() + case QPAREN => + in.token = LPAREN + atPos(in.offset)(Quote(simpleExpr())) + case QBRACE => + in.token = LBRACE + atPos(in.offset)(Quote(simpleExpr())) + case QBRACKET => + in.token = LBRACKET + atPos(in.offset)(Quote(inBrackets(typ()))) case NEW => canApply = false val start = in.skipToken() @@ -1452,7 +1467,8 @@ object Parsers { } else fn - /** BlockExpr ::= `{' (CaseClauses | Block) `}' + /** BlockExpr ::= `{' BlockExprContents `}' + * BlockExprContents ::= CaseClauses | Block */ def blockExpr(): Tree = atPos(in.offset) { inDefScopeBraces { @@ -2074,8 +2090,6 @@ object Parsers { } } - - private def checkVarArgsRules(vparamss: List[List[untpd.ValDef]]): List[untpd.ValDef] = { def isVarArgs(tpt: Trees.Tree[Untyped]): Boolean = tpt match { case PostfixOp(_, op) if op.name == tpnme.raw.STAR 
=> true diff --git a/compiler/src/dotty/tools/dotc/parsing/Scanners.scala b/compiler/src/dotty/tools/dotc/parsing/Scanners.scala index 5677542c0605..15265e4ca5cb 100644 --- a/compiler/src/dotty/tools/dotc/parsing/Scanners.scala +++ b/compiler/src/dotty/tools/dotc/parsing/Scanners.scala @@ -540,9 +540,13 @@ object Scanners { def fetchSingleQuote() = { nextChar() if (isIdentifierStart(ch)) - charLitOr(() => getIdentRest()) + charLitOr { getIdentRest(); SYMBOLLIT } else if (isOperatorPart(ch) && (ch != '\\')) - charLitOr(() => getOperatorRest()) + charLitOr { getOperatorRest(); SYMBOLLIT } + else if (ch == '(' || ch == '{' || ch == '[') { + val tok = quote(ch) + charLitOr(tok) + } else { getLitChar() if (ch == '\'') { @@ -965,7 +969,7 @@ object Scanners { /** Parse character literal if current character is followed by \', * or follow with given op and return a symbol literal token */ - def charLitOr(op: () => Unit): Unit = { + def charLitOr(op: => Token): Unit = { putChar(ch) nextChar() if (ch == '\'') { @@ -973,11 +977,19 @@ object Scanners { token = CHARLIT setStrVal() } else { - op() - token = SYMBOLLIT + token = op strVal = name.toString + litBuf.clear() } } + + /** The opening quote bracket token corresponding to `c` */ + def quote(c: Char): Token = c match { + case '(' => QPAREN + case '{' => QBRACE + case '[' => QBRACKET + } + override def toString = showTokenDetailed(token) + { if ((identifierTokens contains token) || (literalTokens contains token)) " " + name diff --git a/compiler/src/dotty/tools/dotc/parsing/Tokens.scala b/compiler/src/dotty/tools/dotc/parsing/Tokens.scala index 3a0655e72f75..3c1b3b679944 100644 --- a/compiler/src/dotty/tools/dotc/parsing/Tokens.scala +++ b/compiler/src/dotty/tools/dotc/parsing/Tokens.scala @@ -190,6 +190,9 @@ object Tokens extends TokensCommon { final val SUPERTYPE = 81; enter(SUPERTYPE, ">:") final val HASH = 82; enter(HASH, "#") final val VIEWBOUND = 84; enter(VIEWBOUND, "<%") // TODO: deprecate + final val QPAREN = 85; 
enter(QPAREN, "'(") + final val QBRACE = 86; enter(QBRACE, "'{") + final val QBRACKET = 87; enter(QBRACKET, "'[") /** XML mode */ final val XMLSTART = 96; enter(XMLSTART, "$XMLSTART$<") // TODO: deprecate @@ -208,7 +211,7 @@ object Tokens extends TokensCommon { USCORE, NULL, THIS, SUPER, TRUE, FALSE, RETURN, XMLSTART) final val canStartExpressionTokens = atomicExprTokens | BitSet( - LBRACE, LPAREN, IF, DO, WHILE, FOR, NEW, TRY, THROW) + LBRACE, LPAREN, QBRACE, QPAREN, IF, DO, WHILE, FOR, NEW, TRY, THROW) final val canStartTypeTokens = literalTokens | identifierTokens | BitSet( THIS, SUPER, USCORE, LPAREN, AT) diff --git a/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala b/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala index 6cd933a48dc9..ea0f0ef0b3a0 100644 --- a/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala +++ b/compiler/src/dotty/tools/dotc/printing/PlainPrinter.scala @@ -53,10 +53,7 @@ class PlainPrinter(_ctx: Context) extends Printer { case AndType(tp1, tp2) => homogenize(tp1) & homogenize(tp2) case OrType(tp1, tp2) => - if (tp1.show > tp2.show) - homogenize(tp1) | homogenize(tp2) - else - homogenize(tp2) | homogenize(tp1) + homogenize(tp1) | homogenize(tp2) case tp: SkolemType => homogenize(tp.info) case tp: LazyRef => @@ -79,24 +76,6 @@ class PlainPrinter(_ctx: Context) extends Printer { private def selfRecName(n: Int) = s"z$n" - /** Render elements alternating with `sep` string */ - protected def toText(elems: Traversable[Showable], sep: String) = - Text(elems map (_ toText this), sep) - - /** Render element within highest precedence */ - protected def toTextLocal(elem: Showable): Text = - atPrec(DotPrec) { elem.toText(this) } - - /** Render element within lowest precedence */ - protected def toTextGlobal(elem: Showable): Text = - atPrec(GlobalPrec) { elem.toText(this) } - - protected def toTextLocal(elems: Traversable[Showable], sep: String) = - atPrec(DotPrec) { toText(elems, sep) } - - protected def toTextGlobal(elems: 
Traversable[Showable], sep: String) = - atPrec(GlobalPrec) { toText(elems, sep) } - /** If the name of the symbol's owner should be used when you care about * seeing an interesting name: in such cases this symbol is e.g. a method * parameter with a synthetic name, a constructor named "this", an object @@ -199,20 +178,20 @@ class PlainPrinter(_ctx: Context) extends Printer { case tp: TypeLambda => changePrec(GlobalPrec) { "[" ~ paramsText(tp) ~ "]" ~ lambdaHash(tp) ~ - (" => " provided !tp.resultType.isInstanceOf[MethodType]) ~ + (Str(" => ") provided !tp.resultType.isInstanceOf[MethodType]) ~ toTextGlobal(tp.resultType) } case AnnotatedType(tpe, annot) => toTextLocal(tpe) ~ " " ~ toText(annot) case tp: TypeVar => if (tp.isInstantiated) - toTextLocal(tp.instanceOpt) ~ ("^" provided ctx.settings.YprintDebug.value) + toTextLocal(tp.instanceOpt) ~ (Str("^") provided ctx.settings.YprintDebug.value) else { val constr = ctx.typerState.constraint val bounds = if (constr.contains(tp)) constr.fullBounds(tp.origin)(ctx.addMode(Mode.Printing)) else TypeBounds.empty - if (bounds.isAlias) toText(bounds.lo) ~ ("^" provided ctx.settings.YprintDebug.value) + if (bounds.isAlias) toText(bounds.lo) ~ (Str("^") provided ctx.settings.YprintDebug.value) else if (ctx.settings.YshowVarBounds.value) "(" ~ toText(tp.origin) ~ "?" 
~ toText(bounds) ~ ")" else toText(tp.origin) } @@ -498,7 +477,7 @@ class PlainPrinter(_ctx: Context) extends Printer { else Text() - nodeName ~ "(" ~ elems ~ tpSuffix ~ ")" ~ (node.pos.toString provided ctx.settings.YprintPos.value) + nodeName ~ "(" ~ elems ~ tpSuffix ~ ")" ~ (Str(node.pos.toString) provided ctx.settings.YprintPos.value) case _ => tree.fallbackToText(this) } @@ -544,6 +523,7 @@ class PlainPrinter(_ctx: Context) extends Printer { def plain = this protected def keywordStr(text: String): String = coloredStr(text, SyntaxHighlighting.KeywordColor) + protected def keywordText(text: String): Text = coloredStr(text, SyntaxHighlighting.KeywordColor) protected def valDefText(text: Text): Text = coloredText(text, SyntaxHighlighting.ValDefColor) protected def typeText(text: Text): Text = coloredText(text, SyntaxHighlighting.TypeColor) protected def literalText(text: Text): Text = coloredText(text, SyntaxHighlighting.LiteralColor) diff --git a/compiler/src/dotty/tools/dotc/printing/Printer.scala b/compiler/src/dotty/tools/dotc/printing/Printer.scala index e0794627425a..f054a3dcb357 100644 --- a/compiler/src/dotty/tools/dotc/printing/Printer.scala +++ b/compiler/src/dotty/tools/dotc/printing/Printer.scala @@ -103,7 +103,27 @@ abstract class Printer { /** Textual representation of info relating to an import clause */ def toText(result: ImportInfo): Text - /** Perform string or text-producing operation `op` so that only a + /** Render element within highest precedence */ + def toTextLocal(elem: Showable): Text = + atPrec(DotPrec) { elem.toText(this) } + + /** Render element within lowest precedence */ + def toTextGlobal(elem: Showable): Text = + atPrec(GlobalPrec) { elem.toText(this) } + + /** Render elements alternating with `sep` string */ + def toText(elems: Traversable[Showable], sep: String) = + Text(elems map (_ toText this), sep) + + /** Render elements within highest precedence */ + def toTextLocal(elems: Traversable[Showable], sep: String) = + 
atPrec(DotPrec) { toText(elems, sep) } + + /** Render elements within lowest precedence */ + def toTextGlobal(elems: Traversable[Showable], sep: String) = + atPrec(GlobalPrec) { toText(elems, sep) } + + /** Perform string or text-producing operation `op` so that only a * summarized text with given recursion depth is shown */ def summarized[T](depth: Int)(op: => T): T diff --git a/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala b/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala index 279c3d7642b2..57587541d036 100644 --- a/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala +++ b/compiler/src/dotty/tools/dotc/printing/RefinedPrinter.scala @@ -125,11 +125,11 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { atPrec(InfixPrec) { argText(args.head) } else toTextTuple(args.init) - (keywordStr("implicit ") provided isImplicit) ~ argStr ~ " => " ~ argText(args.last) + (keywordText("implicit ") provided isImplicit) ~ argStr ~ " => " ~ argText(args.last) } def toTextDependentFunction(appType: MethodType): Text = { - ("implicit " provided appType.isImplicitMethod) ~ + (keywordText("implicit ") provided appType.isImplicitMethod) ~ "(" ~ paramsText(appType) ~ ") => " ~ toText(appType.resultType) } @@ -186,7 +186,7 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { withoutPos(super.toText(tp)) case tp: SelectionProto => return "?{ " ~ toText(tp.name) ~ - (" " provided !tp.name.toSimpleName.last.isLetterOrDigit) ~ + (Str(" ") provided !tp.name.toSimpleName.last.isLetterOrDigit) ~ ": " ~ toText(tp.memberProto) ~ " }" case tp: ViewProto => return toText(tp.argType) ~ " ?=>? 
" ~ toText(tp.resultType) @@ -272,7 +272,7 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { if (homogenizedView && mods.flags.isTypeFlags) flagMask &~= Implicit // drop implicit from classes val flags = mods.flags & flagMask val flagsText = if (flags.isEmpty) "" else keywordStr((mods.flags & flagMask).toString) - Text(mods.annotations.map(annotText), " ") ~~ flagsText ~~ (kw provided !suppressKw) + Text(mods.annotations.map(annotText), " ") ~~ flagsText ~~ (Str(kw) provided !suppressKw) } def varianceText(mods: untpd.Modifiers) = @@ -343,7 +343,7 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { val bodyText = "{" ~~ selfText ~~ toTextGlobal(primaryConstrs ::: body, "\n") ~ "}" - prefix ~ (keywordStr(" extends") provided !ofNew) ~~ parentsText ~~ bodyText + prefix ~ (keywordText(" extends") provided !ofNew) ~~ parentsText ~~ bodyText } def toTextPackageId(pid: Tree): Text = @@ -412,7 +412,7 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { blockText(stats :+ expr) case If(cond, thenp, elsep) => changePrec(GlobalPrec) { - keywordStr("if ") ~ toText(cond) ~ (keywordStr(" then") provided !cond.isInstanceOf[Parens]) ~~ toText(thenp) ~ optText(elsep)(keywordStr(" else ") ~ _) + keywordStr("if ") ~ toText(cond) ~ (keywordText(" then") provided !cond.isInstanceOf[Parens]) ~~ toText(thenp) ~ optText(elsep)(keywordStr(" else ") ~ _) } case Closure(env, ref, target) => "closure(" ~ (toTextGlobal(env, ", ") ~ " | " provided env.nonEmpty) ~ @@ -458,7 +458,8 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { case ByNameTypeTree(tpt) => "=> " ~ toTextLocal(tpt) case TypeBoundsTree(lo, hi) => - optText(lo)(" >: " ~ _) ~ optText(hi)(" <: " ~ _) + if (lo eq hi) optText(lo)(" = " ~ _) + else optText(lo)(" >: " ~ _) ~ optText(hi)(" <: " ~ _) case Bind(name, body) => changePrec(InfixPrec) { toText(name) ~ " @ " ~ toText(body) } case Alternative(trees) => @@ -489,10 +490,7 @@ class RefinedPrinter(_ctx: Context) 
extends PlainPrinter(_ctx) { def typeDefText(tparamsText: => Text, rhsText: => Text) = dclTextOr { modText(tree.mods, keywordStr("type")) ~~ (varianceText(tree.mods) ~ typeText(nameIdText(tree))) ~ - withEnclosingDef(tree) { - if (tree.hasType) toText(tree.symbol.info) // TODO: always print RHS, once we pickle/unpickle type trees - else tparamsText ~ rhsText - } + withEnclosingDef(tree) { tparamsText ~ rhsText } } def recur(rhs: Tree, tparamsTxt: => Text): Text = rhs match { case impl: Template => @@ -503,6 +501,8 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { typeDefText(tparamsTxt, toText(rhs)) case LambdaTypeTree(tparams, body) => recur(body, tparamsText(tparams)) + case rhs: TypeTree if rhs.tpe.isInstanceOf[TypeBounds] => + typeDefText(tparamsTxt, toText(rhs)) case rhs => typeDefText(tparamsTxt, optText(rhs)(" = " ~ _)) } @@ -598,6 +598,8 @@ class RefinedPrinter(_ctx: Context) extends PlainPrinter(_ctx) { changePrec(GlobalPrec) { keywordStr("try ") ~ toText(expr) ~ " " ~ keywordStr("catch") ~ " {" ~ toText(handler) ~ "}" ~ optText(finalizer)(keywordStr(" finally ") ~ _) } + case Quote(tree) => + if (tree.isType) "'[" ~ toTextGlobal(tree) ~ "]" else "'(" ~ toTextGlobal(tree) ~ ")" case Thicket(trees) => "Thicket {" ~~ toTextGlobal(trees, "\n") ~~ "}" case _ => diff --git a/compiler/src/dotty/tools/dotc/printing/Texts.scala b/compiler/src/dotty/tools/dotc/printing/Texts.scala index 6745355c9ac6..41e91a5957a1 100644 --- a/compiler/src/dotty/tools/dotc/printing/Texts.scala +++ b/compiler/src/dotty/tools/dotc/printing/Texts.scala @@ -146,8 +146,6 @@ object Texts { def over (that: Text) = if (this.isVertical) Vertical(that :: this.relems) else Vertical(that :: this :: Nil) - - def provided(pred: Boolean) = if (pred) this else Str("") } object Text { @@ -169,6 +167,10 @@ object Texts { /** The given texts `xs`, each on a separate line */ def lines(xs: Traversable[Text]) = Vertical(xs.toList.reverse) + + implicit class textDeco(text: => Text) { + 
def provided(cond: Boolean): Text = if (cond) text else Str("") + } } case class Str(s: String, lineRange: LineRange = EmptyLineRange) extends Text { diff --git a/compiler/src/dotty/tools/dotc/reporting/trace.scala b/compiler/src/dotty/tools/dotc/reporting/trace.scala index 4d4c0578ffb9..d0184c4c9e67 100644 --- a/compiler/src/dotty/tools/dotc/reporting/trace.scala +++ b/compiler/src/dotty/tools/dotc/reporting/trace.scala @@ -14,14 +14,18 @@ object trace { conditionally(ctx.settings.YdebugTrace.value, question, false)(op) @inline - def conditionally[TC](cond: Boolean, question: => String, show: Boolean)(op: => TC)(implicit ctx: Context): TC = - if (Config.tracingEnabled && cond) apply[TC](question, Printers.default, show)(op) - else op + def conditionally[TC](cond: Boolean, question: => String, show: Boolean)(op: => TC)(implicit ctx: Context): TC = { + def op1 = op + if (Config.tracingEnabled && cond) apply[TC](question, Printers.default, show)(op1) + else op1 + } @inline - def apply[T](question: => String, printer: Printers.Printer, show: Boolean)(op: => T)(implicit ctx: Context): T = - if (!Config.tracingEnabled || printer.eq(config.Printers.noPrinter)) op - else doTrace[T](question, printer, show)(op) + def apply[T](question: => String, printer: Printers.Printer, show: Boolean)(op: => T)(implicit ctx: Context): T = { + def op1 = op + if (!Config.tracingEnabled || printer.eq(config.Printers.noPrinter)) op1 + else doTrace[T](question, printer, show)(op1) + } @inline def apply[T](question: => String, printer: Printers.Printer)(op: => T)(implicit ctx: Context): T = diff --git a/compiler/src/dotty/tools/dotc/transform/ElimJavaPackages.scala b/compiler/src/dotty/tools/dotc/transform/ElimJavaPackages.scala deleted file mode 100644 index a823ce4d7ed7..000000000000 --- a/compiler/src/dotty/tools/dotc/transform/ElimJavaPackages.scala +++ /dev/null @@ -1,46 +0,0 @@ -package dotty.tools.dotc.transform - -import dotty.tools.dotc.ast.tpd._ -import 
dotty.tools.dotc.core.Contexts.Context -import dotty.tools.dotc.core.Flags._ -import dotty.tools.dotc.core.Types.{Type, TypeRef} -import dotty.tools.dotc.transform.MegaPhase.MiniPhase - -/** - * Eliminates syntactic references to Java packages, so that there's no chance - * they accidentally end up in the backend. - */ -class ElimJavaPackages extends MiniPhase { - - override def phaseName: String = "elimJavaPackages" - - override def transformSelect(tree: Select)(implicit ctx: Context): Tree = { - if (isJavaPackage(tree)) { - assert(tree.tpe.isInstanceOf[TypeRef], s"Expected tree with type TypeRef, but got ${tree.tpe.show}") - Ident(tree.tpe.asInstanceOf[TypeRef]) - } else { - tree - } - } - - override def checkPostCondition(tree: Tree)(implicit ctx: Context): Unit = { - tree match { - case tree: Select => - assert(!isJavaPackage(tree), s"Unexpected reference to Java package in ${tree.show}") - case _ => () - } - } - - /** - * Is the given tree a syntactic reference to a Java package? - */ - private def isJavaPackage(tree: Select)(implicit ctx: Context): Boolean = { - tree.tpe match { - case TypeRef(prefix, _) => - val flags = prefix.termSymbol.flags - // Testing for each flag separately is more efficient than using FlagConjunction. - flags.is(Package) && flags.is(JavaDefined) - case _ => false - } - } -} diff --git a/compiler/src/dotty/tools/dotc/transform/ElimPackagePrefixes.scala b/compiler/src/dotty/tools/dotc/transform/ElimPackagePrefixes.scala new file mode 100644 index 000000000000..9cd329fd96b9 --- /dev/null +++ b/compiler/src/dotty/tools/dotc/transform/ElimPackagePrefixes.scala @@ -0,0 +1,31 @@ +package dotty.tools.dotc +package transform + +import core._ +import Decorators._, Flags._, Types._, Contexts._, Symbols._ +import ast.tpd._ +import Flags._ +import MegaPhase.MiniPhase + +/** Eliminates syntactic references to package terms as prefixes of classes, so that there's no chance + * they accidentally end up in the backend. 
+ */ +class ElimPackagePrefixes extends MiniPhase { + + override def phaseName: String = "elimPackagePrefixes" + + override def transformSelect(tree: Select)(implicit ctx: Context): Tree = + if (isPackageClassRef(tree)) Ident(tree.tpe.asInstanceOf[TypeRef]) else tree + + override def checkPostCondition(tree: Tree)(implicit ctx: Context): Unit = tree match { + case tree: Select => + assert(!isPackageClassRef(tree), i"Unexpected reference to package in $tree") + case _ => + } + + /** Is the given tree a reference to a type in a package? */ + private def isPackageClassRef(tree: Select)(implicit ctx: Context): Boolean = tree.tpe match { + case TypeRef(prefix, _) => prefix.termSymbol.is(Package) + case _ => false + } +} diff --git a/compiler/src/dotty/tools/dotc/transform/ExpandPrivate.scala b/compiler/src/dotty/tools/dotc/transform/ExpandPrivate.scala index 34bc6c6e2694..e5ee8556ec88 100644 --- a/compiler/src/dotty/tools/dotc/transform/ExpandPrivate.scala +++ b/compiler/src/dotty/tools/dotc/transform/ExpandPrivate.scala @@ -74,7 +74,7 @@ class ExpandPrivate extends MiniPhase with IdentityDenotTransformer { thisPhase private def ensurePrivateAccessible(d: SymDenotation)(implicit ctx: Context) = if (isVCPrivateParamAccessor(d)) d.ensureNotPrivate.installAfter(thisPhase) - else if (d.is(PrivateTerm) && !d.owner.is(Package) && d.owner != ctx.owner.enclosingClass) { + else if (d.is(PrivateTerm) && !d.owner.is(Package) && d.owner != ctx.owner.lexicallyEnclosingClass) { // Paths `p1` and `p2` are similar if they have a common suffix that follows // possibly different directory paths. That is, their common suffix extends // in both cases either to the start of the path or to a file separator character. 
diff --git a/compiler/src/dotty/tools/dotc/transform/FirstTransform.scala b/compiler/src/dotty/tools/dotc/transform/FirstTransform.scala index a3e877a3c292..af46afec3a6b 100644 --- a/compiler/src/dotty/tools/dotc/transform/FirstTransform.scala +++ b/compiler/src/dotty/tools/dotc/transform/FirstTransform.scala @@ -188,19 +188,20 @@ class FirstTransform extends MiniPhase with InfoTransformer { thisPhase => override def transformStats(trees: List[Tree])(implicit ctx: Context): List[Tree] = ast.Trees.flatten(reorderAndComplete(trees)(ctx.withPhase(thisPhase.next))) + private def toTypeTree(tree: Tree)(implicit ctx: Context) = + TypeTree(tree.tpe).withPos(tree.pos) + override def transformOther(tree: Tree)(implicit ctx: Context) = tree match { case tree: Import => EmptyTree case tree: NamedArg => transformAllDeep(tree.arg) - case tree => if (tree.isType) TypeTree(tree.tpe).withPos(tree.pos) else tree + case tree => if (tree.isType) toTypeTree(tree) else tree } override def transformIdent(tree: Ident)(implicit ctx: Context) = - if (tree.isType) TypeTree(tree.tpe).withPos(tree.pos) - else constToLiteral(tree) + if (tree.isType) toTypeTree(tree) else constToLiteral(tree) override def transformSelect(tree: Select)(implicit ctx: Context) = - if (tree.isType) TypeTree(tree.tpe).withPos(tree.pos) - else constToLiteral(tree) + if (tree.isType) toTypeTree(tree) else constToLiteral(tree) override def transformTypeApply(tree: TypeApply)(implicit ctx: Context) = constToLiteral(tree) diff --git a/compiler/src/dotty/tools/dotc/transform/MacroTransform.scala b/compiler/src/dotty/tools/dotc/transform/MacroTransform.scala index 5ef7b4d9d28c..9d84a2adb66c 100644 --- a/compiler/src/dotty/tools/dotc/transform/MacroTransform.scala +++ b/compiler/src/dotty/tools/dotc/transform/MacroTransform.scala @@ -41,7 +41,6 @@ abstract class MacroTransform extends Phase { def transformStats(trees: List[Tree], exprOwner: Symbol)(implicit ctx: Context): List[Tree] = { def transformStat(stat: Tree): Tree = 
stat match { case _: Import | _: DefTree => transform(stat) - case Thicket(stats) => cpy.Thicket(stat)(stats mapConserve transformStat) - case _ => transform(stat)(ctx.exprContext(stat, exprOwner)) } flatten(trees.mapconserve(transformStat(_))) diff --git a/compiler/src/dotty/tools/dotc/transform/MacroTransformWithImplicits.scala b/compiler/src/dotty/tools/dotc/transform/MacroTransformWithImplicits.scala new file mode 100644 index 000000000000..e6c8e255a265 --- /dev/null +++ b/compiler/src/dotty/tools/dotc/transform/MacroTransformWithImplicits.scala @@ -0,0 +1,90 @@ +package dotty.tools.dotc +package transform + +import core._ +import typer._ +import ast.Trees._ +import Contexts._ +import Symbols._ +import Decorators._ +import collection.mutable +import annotation.tailrec + +/** A MacroTransform that maintains the necessary infrastructure to support + * contextual implicit searches (type-scope implicits are supported anyway). + */ +abstract class MacroTransformWithImplicits extends MacroTransform { + import ast.tpd._ + + override def allowsImplicitSearch = true + + class ImplicitsTransformer extends Transformer { + + /** Transform statements, while maintaining import contexts and expression contexts + * in the same way as Typer does.
The code addresses additional concerns: + * - be tail-recursive where possible + * - don't re-allocate trees where nothing has changed + */ + override def transformStats(stats: List[Tree], exprOwner: Symbol)(implicit ctx: Context): List[Tree] = { + + @tailrec def traverse(curStats: List[Tree])(implicit ctx: Context): List[Tree] = { + + def recur(stats: List[Tree], changed: Tree, rest: List[Tree])(implicit ctx: Context): List[Tree] = { + if (stats eq curStats) { + val rest1 = transformStats(rest, exprOwner) + changed match { + case Thicket(trees) => trees ::: rest1 + case tree => tree :: rest1 + } + } + else stats.head :: recur(stats.tail, changed, rest) + } + + curStats match { + case stat :: rest => + val statCtx = stat match { + case stat: DefTree => ctx + case _ => ctx.exprContext(stat, exprOwner) + } + val restCtx = stat match { + case stat: Import => ctx.importContext(stat, stat.symbol) + case _ => ctx + } + val stat1 = transform(stat)(statCtx) + if (stat1 ne stat) recur(stats, stat1, rest)(restCtx) + else traverse(rest)(restCtx) + case nil => + stats + } + } + traverse(stats) + } + + private def nestedScopeCtx(defs: List[Tree])(implicit ctx: Context): Context = { + val nestedCtx = ctx.fresh.setNewScope + defs foreach { + case d: DefTree => nestedCtx.enter(d.symbol) + case _ => + } + nestedCtx + } + + override def transform(tree: Tree)(implicit ctx: Context): Tree = { + def localCtx = ctx.withOwner(tree.symbol) + tree match { + case tree: Block => + super.transform(tree)(nestedScopeCtx(tree.stats)) + case tree: DefDef => + implicit val ctx = localCtx + cpy.DefDef(tree)( + tree.name, + transformSub(tree.tparams), + tree.vparamss mapConserve (transformSub(_)), + transform(tree.tpt), + transform(tree.rhs)(nestedScopeCtx(tree.vparamss.flatten))) + case _ => + super.transform(tree) + } + } + } +} \ No newline at end of file diff --git a/compiler/src/dotty/tools/dotc/transform/PostTyper.scala b/compiler/src/dotty/tools/dotc/transform/PostTyper.scala index 
0bec80c49cb9..56d5a011ec4b 100644 --- a/compiler/src/dotty/tools/dotc/transform/PostTyper.scala +++ b/compiler/src/dotty/tools/dotc/transform/PostTyper.scala @@ -159,6 +159,24 @@ class PostTyper extends MacroTransform with IdentityDenotTransformer { thisPhase } } + /** 1. If we are in an inline method but not in a nested quote, mark the inline method + * as a macro. + * + * 2. If selection is a quote or splice node, record that fact in the current compilation unit. + */ + private def handleMeta(sym: Symbol)(implicit ctx: Context): Unit = { + + def markAsMacro(c: Context): Unit = + if (c.owner eq c.outer.owner) markAsMacro(c.outer) + else if (c.owner.isInlineMethod) c.owner.setFlag(Macro) + else if (!c.outer.owner.is(Package)) markAsMacro(c.outer) + + if (sym.isSplice || sym.isQuote) { + markAsMacro(ctx) + ctx.compilationUnit.containsQuotesOrSplices = true + } + } + override def transform(tree: Tree)(implicit ctx: Context): Tree = try tree match { case tree: Ident if !tree.isType => @@ -167,6 +185,7 @@ class PostTyper extends MacroTransform with IdentityDenotTransformer { thisPhase case _ => tree } case tree @ Select(qual, name) => + handleMeta(tree.symbol) if (name.isTypeName) { Checking.checkRealizable(qual.tpe, qual.pos.focus) super.transform(tree) diff --git a/compiler/src/dotty/tools/dotc/transform/ReifyQuotes.scala b/compiler/src/dotty/tools/dotc/transform/ReifyQuotes.scala new file mode 100644 index 000000000000..49decaf67949 --- /dev/null +++ b/compiler/src/dotty/tools/dotc/transform/ReifyQuotes.scala @@ -0,0 +1,353 @@ +package dotty.tools.dotc +package transform + +import core._ +import Decorators._, Flags._, Types._, Contexts._, Symbols._, Constants._ +import Flags._ +import ast.Trees._ +import ast.TreeTypeMap +import util.Positions._ +import StdNames._ +import ast.untpd +import tasty.TreePickler.Hole +import MegaPhase.MiniPhase +import SymUtils._ +import NameKinds.OuterSelectName +import scala.collection.mutable + +/** Translates quoted terms and types to
`unpickle` method calls. + * Checks that the phase consistency principle (PCP) holds. + */ +class ReifyQuotes extends MacroTransform { + import ast.tpd._ + + override def phaseName: String = "reifyQuotes" + + override def run(implicit ctx: Context): Unit = + if (ctx.compilationUnit.containsQuotesOrSplices) super.run + + protected def newTransformer(implicit ctx: Context): Transformer = + new Reifier(inQuote = false, null, 0, new LevelInfo) + + /** Serialize `tree`. Embedded splices are represented as nodes of the form + * + * Select(qual, sym) + * + * where `sym` is either `defn.QuotedExpr_~` or `defn.QuotedType_~`. For any splice, + * the `qual` part should not be pickled, since it will be added separately later + * as a splice. + */ + def pickleTree(tree: Tree, isType: Boolean)(implicit ctx: Context): String = + tree.show // TODO: replace with TASTY + + private class LevelInfo { + /** A map from locally defined symbols to the staging levels of their definitions */ + val levelOf = new mutable.HashMap[Symbol, Int] + + /** A stack of entered symbols, to be unwound after scope exit */ + var enteredSyms: List[Symbol] = Nil + } + + /** A tree substituter that also works for holes */ + class SubstMap( + typeMap: Type => Type = IdentityTypeMap, + treeMap: Tree => Tree = identity _, + oldOwners: List[Symbol] = Nil, + newOwners: List[Symbol] = Nil, + substFrom: List[Symbol], + substTo: List[Symbol])(implicit ctx: Context) + extends TreeTypeMap(typeMap, treeMap, oldOwners, newOwners, substFrom, substTo) { + + override def transform(tree: Tree)(implicit ctx: Context): Tree = tree match { + case Hole(n, args) => + Hole(n, args.mapConserve(transform)).withPos(tree.pos).withType(mapType(tree.tpe)) + case _ => + super.transform(tree) + } + + override def newMap( + typeMap: Type => Type, + treeMap: Tree => Tree, + oldOwners: List[Symbol], + newOwners: List[Symbol], + substFrom: List[Symbol], + substTo: List[Symbol])(implicit ctx: Context) = + new SubstMap(typeMap, treeMap, 
oldOwners, newOwners, substFrom, substTo) + } + + /** Requiring that `paramRefs` consists of a single reference `seq` to a Seq[Any], + * a tree map that replaces each hole with index `n` with `seq(n)`, applied + * to any arguments in the hole. + */ + private def replaceHoles(paramRefs: List[Tree]) = new TreeMap { + val seq :: Nil = paramRefs + override def transform(tree: Tree)(implicit ctx: Context): Tree = tree match { + case Hole(n, args) => + val arg = + seq.select(nme.apply).appliedTo(Literal(Constant(n))).ensureConforms(tree.tpe) + if (args.isEmpty) arg + else arg.select(nme.apply).appliedTo(SeqLiteral(args, TypeTree(defn.AnyType))) + case _ => + super.transform(tree) + } + } + + /** If `tree` has holes, convert it to a function taking a `Seq` of elements as arguments + * where each hole is replaced by the corresponding sequence element. + */ + private def elimHoles(tree: Tree)(implicit ctx: Context): Tree = + if (tree.existsSubTree(_.isInstanceOf[Hole])) + Lambda( + MethodType(defn.SeqType.appliedTo(defn.AnyType) :: Nil, tree.tpe), + replaceHoles(_).transform(tree)) + else tree + + /** The main transformer class + * @param inQuote we are within a `'(...)` context that is not shadowed by a nested `~(...)` + * @param outer the next outer reifier, null if this is the topmost transformer + * @param level the current level, where quotes add one and splices subtract one level + * @param levels a stacked map from symbols to the levels in which they were defined + */ + private class Reifier(inQuote: Boolean, val outer: Reifier, val level: Int, levels: LevelInfo) extends Transformer { + import levels._ + + /** A nested reifier for a quote (if `isQuote = true`) or a splice (if not) */ + def nested(isQuote: Boolean): Reifier = + new Reifier(isQuote, this, if (isQuote) level + 1 else level - 1, levels) + + /** We are in a `~(...)` context that is not shadowed by a nested `'(...)` */ + def inSplice = outer != null && !inQuote + + /** A list of embedded quotes (if
`inSplice = true`) or splices (if `inQuote = true`) */ + val embedded = new mutable.ListBuffer[Tree] + + /** A map from type ref T to "expression of type `quoted.Type[T]`". + * These will be turned into splices using `addTags` + */ + val importedTypes = new mutable.LinkedHashSet[TypeRef]() + + /** Assuming typeTagOfRef = `Type1 -> tag1, ..., TypeN -> tagN`, the expression + * + * { type = .unary_~ + * ... + * type = .unary.~ + * + * } + * + * where all references to `TypeI` in `expr` are rewired to point to the locally + * defined versions. As a side effect, prepend the expressions `tag1, ..., `tagN` + * as splices to `buf`. + */ + def addTags(expr: Tree)(implicit ctx: Context): Tree = + if (importedTypes.isEmpty) expr + else { + val trefs = importedTypes.toList + val typeDefs = for (tref <- trefs) yield { + val tag = New(defn.QuotedTypeType.appliedTo(tref), Nil) + val rhs = transform(tag.select(tpnme.UNARY_~)) + val alias = ctx.typeAssigner.assignType(untpd.TypeBoundsTree(rhs, rhs), rhs, rhs) + val original = tref.symbol.asType + val local = original.copy( + owner = ctx.owner, + flags = Synthetic, + info = TypeAlias(tag.tpe.select(tpnme.UNARY_~))) + ctx.typeAssigner.assignType(untpd.TypeDef(original.name, alias), local) + } + importedTypes.clear() + Block(typeDefs, + new SubstMap(substFrom = trefs.map(_.symbol), substTo = typeDefs.map(_.symbol)) + .apply(expr)) + } + + /** Enter staging level of symbol defined by `tree`, if applicable. */ + def markDef(tree: Tree)(implicit ctx: Context) = tree match { + case tree: DefTree => + val sym = tree.symbol + if ((sym.isClass || !sym.maybeOwner.isType) && !levelOf.contains(sym)) { + levelOf(sym) = level + enteredSyms = sym :: enteredSyms + } + case _ => + } + + /** Does the level of `sym` match the current level? + * An exception is made for inline vals in macros. 
These are also OK if their level + * is one higher than the current level, because on execution such values + * are constant expression trees and we can pull out the constant from the tree. + */ + def levelOK(sym: Symbol)(implicit ctx: Context): Boolean = levelOf.get(sym) match { + case Some(l) => + l == level || + sym.is(Inline) && sym.owner.is(Macro) && sym.info.isValueType && l - 1 == level + case None => + true + } + + /** Issue a "splice outside quote" error unless we ar in the body of an inline method */ + def spliceOutsideQuotes(pos: Position)(implicit ctx: Context) = + ctx.error(i"splice outside quotes", pos) + + /** Check reference to `sym` for phase consistency, where `tp` is the underlying type + * by which we refer to `sym`. + */ + def check(sym: Symbol, tp: Type, pos: Position)(implicit ctx: Context): Unit = { + val isThis = tp.isInstanceOf[ThisType] + def symStr = + if (!isThis) sym.show + else if (sym.is(ModuleClass)) sym.sourceModule.show + else i"${sym.name}.this" + if (!isThis && sym.maybeOwner.isType) + check(sym.owner, sym.owner.thisType, pos) + else if (sym.exists && !sym.isStaticOwner && !levelOK(sym)) + tp match { + case tp: TypeRef => + importedTypes += tp + case _ => + ctx.error(em"""access to $symStr from wrong staging level: + | - the definition is at level ${levelOf(sym)}, + | - but the access is at level $level.""", pos) + } + } + + /** Check all named types and this-types in a given type for phase consistency. 
*/ + def checkType(pos: Position)(implicit ctx: Context): TypeAccumulator[Unit] = new TypeAccumulator[Unit] { + def apply(acc: Unit, tp: Type): Unit = reporting.trace(i"check type level $tp at $level") { + tp match { + case tp: NamedType if tp.symbol.isSplice => + if (inQuote) outer.checkType(pos).foldOver(acc, tp) + else { + spliceOutsideQuotes(pos) + tp + } + case tp: NamedType => + check(tp.symbol, tp, pos) + foldOver(acc, tp) + case tp: ThisType => + check(tp.cls, tp, pos) + foldOver(acc, tp) + case _ => + foldOver(acc, tp) + } + } + } + + /** If `tree` refers to a locally defined symbol (either directly, or in a pickled type), + * check that its staging level matches the current level. References to types + * that are phase-incorrect can still be healed as follows: + * + * If `T` is a reference to a type at the wrong level, heal it by setting things up + * so that we later add a type definition + * + * type T' = ~quoted.Type[T] + * + * to the quoted text and rename T to T' in it. This is done later in `reify` via + * `addTags`. `checkLevel` itself only records what needs to be done in the + * `typeTagOfRef` field of the current `Splice` structure. + */ + private def checkLevel(tree: Tree)(implicit ctx: Context): Tree = { + tree match { + case (_: Ident) | (_: This) => + check(tree.symbol, tree.tpe, tree.pos) + case (_: UnApply) | (_: TypeTree) => + checkType(tree.pos).apply((), tree.tpe) + case Select(qual, OuterSelectName(_, levels)) => + checkType(tree.pos).apply((), tree.tpe.widen) + case _: Bind => + checkType(tree.pos).apply((), tree.symbol.info) + case _: Template => + checkType(tree.pos).apply((), tree.symbol.owner.asClass.givenSelfType) + case _ => + } + tree + } + + /** Split `body` into a core and a list of embedded splices. + * Then if inside a splice, make a hole from these parts. 
+ * If outside a splice, generate a call tp `scala.quoted.Unpickler.unpickleType` or + * `scala.quoted.Unpickler.unpickleExpr` that matches `tpe` with + * core and splices as arguments. + */ + private def quotation(body: Tree, quote: Tree)(implicit ctx: Context) = { + val (body1, splices) = nested(isQuote = true).split(body) + if (inSplice) + makeHole(body1, splices, quote.tpe) + else { + val isType = quote.tpe.isRef(defn.QuotedTypeClass) + ref(if (isType) defn.Unpickler_unpickleType else defn.Unpickler_unpickleExpr) + .appliedToType(if (isType) body1.tpe else body1.tpe.widen) + .appliedTo( + Literal(Constant(pickleTree(body1, isType))), + SeqLiteral(splices, TypeTree(defn.AnyType))) + } + }.withPos(quote.pos) + + /** If inside a quote, split `body` into a core and a list of embedded quotes + * and make a hole from these parts. Otherwise issue an error, unless we + * are in the body of an inline method. + */ + private def splice(body: Tree, splice: Tree)(implicit ctx: Context): Tree = { + if (inQuote) { + val (body1, quotes) = nested(isQuote = false).split(body) + makeHole(body1, quotes, splice.tpe) + } + else { + spliceOutsideQuotes(splice.pos) + splice + } + }.withPos(splice.pos) + + /** Transform `tree` and return the resulting tree and all `embedded` quotes + * or splices as a pair, after performing the `addTags` transform. + */ + private def split(tree: Tree)(implicit ctx: Context): (Tree, List[Tree]) = { + val tree1 = addTags(transform(tree)) + (tree1, embedded.toList.map(elimHoles)) + } + + /** Register `body` as an `embedded` quote or splice + * and return a hole with `splices` as arguments and the given type `tpe`. 
+ */ + private def makeHole(body: Tree, splices: List[Tree], tpe: Type)(implicit ctx: Context): Hole = { + val idx = embedded.length + embedded += body + Hole(idx, splices).withType(tpe).asInstanceOf[Hole] + } + + override def transform(tree: Tree)(implicit ctx: Context): Tree = + reporting.trace(i"reify $tree at $level", show = true) { + def mapOverTree(lastEntered: List[Symbol]) = + try super.transform(tree) + finally + while (enteredSyms ne lastEntered) { + levelOf -= enteredSyms.head + enteredSyms = enteredSyms.tail + } + tree match { + case Apply(fn, arg :: Nil) if fn.symbol == defn.quoteMethod => + quotation(arg, tree) + case TypeApply(fn, arg :: Nil) if fn.symbol == defn.typeQuoteMethod => + quotation(arg, tree) + case Select(body, _) if tree.symbol.isSplice => + splice(body, tree) + case Block(stats, _) => + val last = enteredSyms + stats.foreach(markDef) + mapOverTree(last) + case Inlined(call, bindings, expansion @ Select(body, name)) if expansion.symbol.isSplice => + // To maintain phase consistency, convert inlined expressions of the form + // `{ bindings; ~expansion }` to `~{ bindings; expansion }` + transform(cpy.Select(expansion)(cpy.Inlined(tree)(call, bindings, body), name)) + case _: Import => + tree + case tree: DefDef if tree.symbol.is(Macro) && level == 0 => + markDef(tree) + val tree1 = nested(isQuote = true).transform(tree) + // check macro code as it if appeared in a quoted context + cpy.DefDef(tree)(rhs = EmptyTree) + case _ => + markDef(tree) + checkLevel(mapOverTree(enteredSyms)) + } + } + } +} \ No newline at end of file diff --git a/compiler/src/dotty/tools/dotc/transform/SymUtils.scala b/compiler/src/dotty/tools/dotc/transform/SymUtils.scala index 776ecb057892..c30fa18a807e 100644 --- a/compiler/src/dotty/tools/dotc/transform/SymUtils.scala +++ b/compiler/src/dotty/tools/dotc/transform/SymUtils.scala @@ -175,4 +175,12 @@ class SymUtils(val self: Symbol) extends AnyVal { else if (owner.is(Package)) false else owner.isLocal } + + /** Is 
symbol a quote operation? */ + def isQuote(implicit ctx: Context): Boolean = + self == defn.quoteMethod || self == defn.typeQuoteMethod + + /** Is symbol a splice operation? */ + def isSplice(implicit ctx: Context): Boolean = + self == defn.QuotedExpr_~ || self == defn.QuotedType_~ } diff --git a/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala b/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala index 237bc2d2efad..f59d418d110d 100644 --- a/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala +++ b/compiler/src/dotty/tools/dotc/transform/TreeChecker.scala @@ -375,7 +375,8 @@ class TreeChecker extends Phase with SymTransformer { def isNonMagicalMethod(x: Symbol) = x.is(Method) && !x.isCompanionMethod && - !x.isValueClassConvertMethod + !x.isValueClassConvertMethod && + !(x.is(Macro) && ctx.phase.refChecked) val symbolsNotDefined = cls.classInfo.decls.toList.toSet.filter(isNonMagicalMethod) -- impl.body.map(_.symbol) - constr.symbol diff --git a/compiler/src/dotty/tools/dotc/typer/FrontEnd.scala b/compiler/src/dotty/tools/dotc/typer/FrontEnd.scala index 6cb045853ad1..19eb297ff652 100644 --- a/compiler/src/dotty/tools/dotc/typer/FrontEnd.scala +++ b/compiler/src/dotty/tools/dotc/typer/FrontEnd.scala @@ -19,6 +19,8 @@ class FrontEnd extends Phase { override def isTyper = true import ast.tpd + override def allowsImplicitSearch = true + /** The contexts for compilation units that are parsed but not yet entered */ private[this] var remaining: List[Context] = Nil diff --git a/compiler/src/dotty/tools/dotc/typer/Implicits.scala b/compiler/src/dotty/tools/dotc/typer/Implicits.scala index 302d19924d13..d35852a2b2e8 100644 --- a/compiler/src/dotty/tools/dotc/typer/Implicits.scala +++ b/compiler/src/dotty/tools/dotc/typer/Implicits.scala @@ -672,13 +672,14 @@ trait Implicits { self: Typer => | | ${arg.show.replace("\n", "\n ")} | - |But $tpe.explanation}.""" + |But ${tpe.explanation}.""" } } + def location(preposition: String) = if (where.isEmpty) "" else 
s" $preposition $where" arg.tpe match { case ambi: AmbiguousImplicits => - msg(s"ambiguous implicit arguments: ${ambi.explanation} of $where")( - s"ambiguous implicit arguments of type ${pt.show} found for $where") + msg(s"ambiguous implicit arguments: ${ambi.explanation}${location("of")}")( + s"ambiguous implicit arguments of type ${pt.show} found${location("for")}") case _ => val userDefined = for { @@ -691,7 +692,7 @@ trait Implicits { self: Typer => pt.typeSymbol.typeParams.map(_.name.unexpandedName.toString), pt.argInfos) } - msg(userDefined.getOrElse(em"no implicit argument of type $pt was found for $where"))() + msg(userDefined.getOrElse(em"no implicit argument of type $pt was found${location("for")}"))() } } @@ -748,7 +749,7 @@ trait Implicits { self: Typer => * !!! todo: catch potential cycles */ def inferImplicit(pt: Type, argument: Tree, pos: Position)(implicit ctx: Context): SearchResult = track("inferImplicit") { - assert(!ctx.isAfterTyper, + assert(ctx.phase.allowsImplicitSearch, if (argument.isEmpty) i"missing implicit parameter of type $pt after typer" else i"type error: ${argument.tpe} does not conform to $pt${err.whyNoMatchStr(argument.tpe, pt)}") trace(s"search implicit ${pt.show}, arg = ${argument.show}: ${argument.tpe.show}", implicits, show = true) { diff --git a/compiler/src/dotty/tools/dotc/typer/Inliner.scala b/compiler/src/dotty/tools/dotc/typer/Inliner.scala index 3240b3556e10..368b7383d794 100644 --- a/compiler/src/dotty/tools/dotc/typer/Inliner.scala +++ b/compiler/src/dotty/tools/dotc/typer/Inliner.scala @@ -496,32 +496,34 @@ class Inliner(call: tpd.Tree, rhs: tpd.Tree)(implicit ctx: Context) { val expansion1 = InlineTyper.typed(expansion, pt)(inlineCtx) /** Does given definition bind a closure that will be inlined? 
*/ - def bindsDeadClosure(defn: ValOrDefDef) = Ident(defn.symbol.termRef) match { - case InlineableClosure(_) => !InlineTyper.retainedClosures.contains(defn.symbol) + def bindsDeadInlineable(defn: ValOrDefDef) = Ident(defn.symbol.termRef) match { + case InlineableArg(_) => !InlineTyper.retainedInlineables.contains(defn.symbol) case _ => false } /** All bindings in `bindingsBuf` except bindings of inlineable closures */ - val bindings = bindingsBuf.toList.filterNot(bindsDeadClosure).map(_.withPos(call.pos)) + val bindings = bindingsBuf.toList.filterNot(bindsDeadInlineable).map(_.withPos(call.pos)) tpd.Inlined(call, bindings, expansion1) } } - /** An extractor for references to closure arguments that refer to `@inline` methods */ - private object InlineableClosure { + /** An extractor for references to inlineable arguments. These are : + * - by-value arguments marked with `inline` + * - all by-name arguments + */ + private object InlineableArg { lazy val paramProxies = paramProxy.values.toSet def unapply(tree: Ident)(implicit ctx: Context): Option[Tree] = - if (paramProxies.contains(tree.tpe)) { + if (paramProxies.contains(tree.tpe)) bindingsBuf.find(_.name == tree.name) match { - case Some(ddef: ValDef) if ddef.symbol.is(Inline) => - ddef.rhs match { - case closure(_, meth, _) => Some(meth) - case _ => None - } + case Some(vdef: ValDef) if vdef.symbol.is(Inline) => + Some(vdef.rhs.changeOwner(vdef.symbol, ctx.owner)) + case Some(ddef: DefDef) => + Some(ddef.rhs.changeOwner(ddef.symbol, ctx.owner)) case _ => None } - } else None + else None } /** A typer for inlined code. 
Its purpose is: @@ -533,16 +535,13 @@ class Inliner(call: tpd.Tree, rhs: tpd.Tree)(implicit ctx: Context) { */ private object InlineTyper extends ReTyper { - var retainedClosures = Set[Symbol]() + var retainedInlineables = Set[Symbol]() - override def typedIdent(tree: untpd.Ident, pt: Type)(implicit ctx: Context) = { - val tree1 = super.typedIdent(tree, pt) - tree1 match { - case InlineableClosure(_) => retainedClosures += tree.symbol - case _ => + override def typedIdent(tree: untpd.Ident, pt: Type)(implicit ctx: Context) = + tree.asInstanceOf[tpd.Tree] match { + case InlineableArg(rhs) => inlining.println(i"inline arg $tree -> $rhs"); rhs + case _ => super.typedIdent(tree, pt) } - tree1 - } override def typedSelect(tree: untpd.Select, pt: Type)(implicit ctx: Context): Tree = { assert(tree.hasType, tree) @@ -565,12 +564,13 @@ class Inliner(call: tpd.Tree, rhs: tpd.Tree)(implicit ctx: Context) { } } - override def typedApply(tree: untpd.Apply, pt: Type)(implicit ctx: Context) = tree.asInstanceOf[tpd.Tree] match { - case Apply(Select(InlineableClosure(fn), nme.apply), args) => - inlining.println(i"reducing $tree with closure $fn") - typed(fn.appliedToArgs(args), pt) - case _ => - super.typedApply(tree, pt) - } + override def typedApply(tree: untpd.Apply, pt: Type)(implicit ctx: Context) = + tree.asInstanceOf[tpd.Tree] match { + case Apply(Select(InlineableArg(closure(_, fn, _)), nme.apply), args) => + inlining.println(i"reducing $tree with closure $fn") + typed(fn.appliedToArgs(args), pt) + case _ => + super.typedApply(tree, pt) + } } } diff --git a/compiler/src/dotty/tools/dotc/typer/Namer.scala b/compiler/src/dotty/tools/dotc/typer/Namer.scala index 09ffc9f0d0e4..0e7cecb0f360 100644 --- a/compiler/src/dotty/tools/dotc/typer/Namer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Namer.scala @@ -385,15 +385,6 @@ class Namer { typer: Typer => case _ => tree } - /** A new context that summarizes an import statement */ - def importContext(imp: Import, sym: 
Symbol)(implicit ctx: Context) = { - val impNameOpt = imp.expr match { - case ref: RefTree => Some(ref.name.asTermName) - case _ => None - } - ctx.fresh.setImportInfo(new ImportInfo(implicit ctx => sym, imp.selectors, impNameOpt)) - } - /** A new context for the interior of a class */ def inClassContext(selfInfo: DotClass /* Should be Type | Symbol*/)(implicit ctx: Context): Context = { val localCtx: Context = ctx.fresh.setNewScope @@ -441,7 +432,7 @@ class Namer { typer: Typer => setDocstring(pkg, stat) ctx case imp: Import => - importContext(imp, createSymbol(imp)) + ctx.importContext(imp, createSymbol(imp)) case mdef: DefTree => val sym = enterSymbol(createSymbol(mdef)) setDocstring(sym, origStat) diff --git a/compiler/src/dotty/tools/dotc/typer/RefChecks.scala b/compiler/src/dotty/tools/dotc/typer/RefChecks.scala index fcbf7c6ff5e4..aaadbb66ec12 100644 --- a/compiler/src/dotty/tools/dotc/typer/RefChecks.scala +++ b/compiler/src/dotty/tools/dotc/typer/RefChecks.scala @@ -130,7 +130,7 @@ object RefChecks { * That is for overriding member M and overridden member O: * * 1.1. M must have the same or stronger access privileges as O. - * 1.2. O must not be final. + * 1.2. O must not be effectively final. * 1.3. O is deferred, or M has `override` modifier. * 1.4. If O is stable, then so is M. * // @M: LIFTED 1.5. Neither M nor O are a parameterized type alias @@ -144,8 +144,9 @@ object RefChecks { * 1.8.1 M's type is a subtype of O's type, or * 1.8.2 M is of type []S, O is of type ()T and S <: T, or * 1.8.3 M is of type ()S, O is of type []T and S <: T, or - * 1.9. If M is a macro def, O cannot be deferred unless there's a concrete method overriding O. - * 1.10. If M is not a macro def, O cannot be a macro def. + * 1.9 M must not be a Dotty macro def + * 1.10. If M is a 2.x macro def, O cannot be deferred unless there's a concrete method overriding O. + * 1.11. If M is not a macro def, O cannot be a macro def. * 2. 
Check that only abstract classes have deferred members * 3. Check that concrete classes do not have deferred definitions * that are not implemented in a subclass. @@ -372,9 +373,11 @@ object RefChecks { overrideError("may not override a non-lazy value") } else if (other.is(Lazy) && !other.isRealMethod && !member.is(Lazy)) { overrideError("must be declared lazy to override a lazy value") - } else if (other.is(Deferred) && member.is(Macro) && member.extendedOverriddenSymbols.forall(_.is(Deferred))) { // (1.9) + } else if (member.is(Macro, butNot = Scala2x)) { // (1.9) + overrideError("is a macro, may not override anything") + } else if (other.is(Deferred) && member.is(Scala2Macro) && member.extendedOverriddenSymbols.forall(_.is(Deferred))) { // (1.10) overrideError("cannot be used here - term macros cannot override abstract methods") - } else if (other.is(Macro) && !member.is(Macro)) { // (1.10) + } else if (other.is(Macro) && !member.is(Macro)) { // (1.11) overrideError("cannot be used here - only term macros can override term macros") } else if (!compatibleTypes(memberTp(self), otherTp(self)) && !compatibleTypes(memberTp(upwardsSelf), otherTp(upwardsSelf))) { diff --git a/compiler/src/dotty/tools/dotc/typer/Typer.scala b/compiler/src/dotty/tools/dotc/typer/Typer.scala index 1519780ea8be..2648c2d183d3 100644 --- a/compiler/src/dotty/tools/dotc/typer/Typer.scala +++ b/compiler/src/dotty/tools/dotc/typer/Typer.scala @@ -393,6 +393,7 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit checkValue(assignType(cpy.Select(tree)(qual, tree.name), qual), pt) def typedSelect(tree: untpd.Select, pt: Type)(implicit ctx: Context): Tree = track("typedSelect") { + def typeSelectOnTerm(implicit ctx: Context): Tree = { val qual1 = typedExpr(tree.qualifier, selectionProto(tree.name, pt, this)) if (tree.name.isTypeName) checkStable(qual1.tpe, qual1.pos) @@ -1072,6 +1073,25 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits 
wit Throw(expr1).withPos(tree.pos) } + def typedQuote(tree: untpd.Quote, pt: Type)(implicit ctx: Context): Tree = track("typedQuote") { + val untpd.Quote(body) = tree + val isType = body.isType + val resultClass = if (isType) defn.QuotedTypeClass else defn.QuotedExprClass + val proto1 = pt.baseType(resultClass) match { + case AppliedType(_, argType :: Nil) => argType + case _ => WildcardType + } + val nestedCtx = ctx.fresh.setTree(tree) + if (isType) { + val body1 = typedType(body, proto1)(nestedCtx) + ref(defn.typeQuoteMethod).appliedToTypeTrees(body1 :: Nil) + } + else { + val body1 = typed(body, proto1)(nestedCtx) + ref(defn.quoteMethod).appliedToType(body1.tpe.widen).appliedTo(body1) + } + } + def typedSeqLiteral(tree: untpd.SeqLiteral, pt: Type)(implicit ctx: Context): SeqLiteral = track("typedSeqLiteral") { val proto1 = pt.elemType match { case NoType => WildcardType @@ -1300,7 +1320,7 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit case rhs => typedExpr(rhs, tpt1.tpe) } val vdef1 = assignType(cpy.ValDef(vdef)(name, tpt1, rhs1), sym) - if (sym.is(Inline, butNot = DeferredOrParamAccessor)) + if (sym.is(Inline, butNot = DeferredOrParamOrAccessor)) checkInlineConformant(rhs1, em"right-hand side of inline $sym") patchIfLazy(vdef1) patchFinalVals(vdef1) @@ -1695,6 +1715,7 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit case tree: untpd.Super => typedSuper(tree, pt) case tree: untpd.SeqLiteral => typedSeqLiteral(tree, pt) case tree: untpd.Inlined => typedInlined(tree, pt) + case tree: untpd.Quote => typedQuote(tree, pt) case tree: untpd.TypeTree => typedTypeTree(tree, pt) case tree: untpd.SingletonTypeTree => typedSingletonTypeTree(tree) case tree: untpd.AndTypeTree => typedAndTypeTree(tree) @@ -1757,7 +1778,7 @@ class Typer extends Namer with TypeAssigner with Applications with Implicits wit case (imp: untpd.Import) :: rest => val imp1 = typed(imp) buf += imp1 - 
traverse(rest)(importContext(imp, imp1.symbol)) + traverse(rest)(ctx.importContext(imp, imp1.symbol)) case (mdef: untpd.DefTree) :: rest => mdef.removeAttachment(ExpandedTree) match { case Some(xtree) => diff --git a/compiler/src/dotty/tools/dotc/util/Stats.scala b/compiler/src/dotty/tools/dotc/util/Stats.scala index c69796f84896..55684a8850bc 100644 --- a/compiler/src/dotty/tools/dotc/util/Stats.scala +++ b/compiler/src/dotty/tools/dotc/util/Stats.scala @@ -46,11 +46,13 @@ import collection.mutable def trackTime[T](fn: String)(op: => T) = if (enabled) doTrackTime(fn)(op) else op - def doTrackTime[T](fn: String)(op: => T) = + def doTrackTime[T](fn: String)(op: => T) = { + def op1 = op if (monitored) { val start = System.nanoTime - try op finally record(fn, ((System.nanoTime - start) / 1000).toInt) - } else op + try op1 finally record(fn, ((System.nanoTime - start) / 1000).toInt) + } else op1 + } class HeartBeat extends Thread() { @volatile private[Stats] var continue = true diff --git a/docs/docs/internals/syntax.md b/docs/docs/internals/syntax.md index 0afe98abdc58..ddc70855ff1d 100644 --- a/docs/docs/internals/syntax.md +++ b/docs/docs/internals/syntax.md @@ -26,7 +26,7 @@ upper ::= ‘A’ | … | ‘Z’ | ‘\$’ | ‘_’ “… and U lower ::= ‘a’ | … | ‘z’ “… and Unicode category Ll” letter ::= upper | lower “… and Unicode categories Lo, Lt, Nl” digit ::= ‘0’ | … | ‘9’ -paren ::= ‘(’ | ‘)’ | ‘[’ | ‘]’ | ‘{’ | ‘}’ +paren ::= ‘(’ | ‘)’ | ‘[’ | ‘]’ | ‘{’ | ‘}’ | ‘'(’ | ‘'[’ | ‘'{’ delim ::= ‘`’ | ‘'’ | ‘"’ | ‘.’ | ‘;’ | ‘,’ opchar ::= “printableChar not matched by (whiteSpace | upper | lower | letter | digit | paren | delim | opchar | Unicode_Sm | @@ -131,6 +131,7 @@ AnnotType ::= SimpleType {Annotation} SimpleType ::= SimpleType TypeArgs AppliedTypeTree(t, args) | SimpleType ‘#’ id Select(t, name) | StableId + | [‘-’ | ‘+’ | ‘~’ | ‘!’] StableId PrefixOp(expr, op) | Path ‘.’ ‘type’ SingletonTypeTree(p) | ‘(’ ArgTypes ‘)’ Tuple(ts) | ‘_’ TypeBounds @@ -182,6 +183,9 @@ InfixExpr ::= 
PrefixExpr PrefixExpr ::= [‘-’ | ‘+’ | ‘~’ | ‘!’] SimpleExpr PrefixOp(expr, op) SimpleExpr ::= ‘new’ Template New(templ) | BlockExpr + | ''{’ BlockExprContents ‘}’ + | ‘'(’ ExprsInParens ‘)’ + | ‘'[’ Type ‘]’ | SimpleExpr1 [‘_’] PostfixOp(expr, _) SimpleExpr1 ::= Literal | Path @@ -198,8 +202,8 @@ ParArgumentExprs ::= ‘(’ ExprsInParens ‘)’ | ‘(’ [ExprsInParens] PostfixExpr ‘:’ ‘_’ ‘*’ ‘)’ exprs :+ Typed(expr, Ident(wildcardStar)) ArgumentExprs ::= ParArgumentExprs | [nl] BlockExpr -BlockExpr ::= ‘{’ CaseClauses ‘}’ Match(EmptyTree, cases) - | ‘{’ Block ‘}’ block // starts at { +BlockExpr ::= ‘{’ BlockExprContents ‘}’ +BlockExprContents ::= CaseClauses | Block Block ::= {BlockStat semi} [BlockResult] Block(stats, expr?) BlockStat ::= Import | {Annotation} [‘implicit’ | ‘lazy’] Def @@ -216,8 +220,8 @@ Enumerator ::= Generator Generator ::= Pattern1 ‘<-’ Expr GenFrom(pat, expr) Guard ::= ‘if’ PostfixExpr -CaseClauses ::= CaseClause { CaseClause } CaseDef(pat, guard?, block) // block starts at => -CaseClause ::= ‘case’ (Pattern [Guard] ‘=>’ Block | INT) +CaseClauses ::= CaseClause { CaseClause } Match(EmptyTree, cases) +CaseClause ::= ‘case’ (Pattern [Guard] ‘=>’ Block | INT) CaseDef(pat, guard?, block) // block starts at => Pattern ::= Pattern1 { ‘|’ Pattern1 } Alternative(pats) Pattern1 ::= PatVar ‘:’ RefinedType Bind(name, Typed(Ident(wildcard), tpe)) diff --git a/docs/docs/reference/symmetric-meta-programming.md b/docs/docs/reference/symmetric-meta-programming.md new file mode 100644 index 000000000000..fdf33b2964f8 --- /dev/null +++ b/docs/docs/reference/symmetric-meta-programming.md @@ -0,0 +1,771 @@ +# Symmetric Meta Programming + +Symmetric meta programming is a new framework for staging and for some +forms of macros. It is expressed as strongly and statically typed +code using two fundamental operations: quotations and splicing. 
A +novel aspect of the approach is that these two operations are +regulated by a phase consistency principle that treats quotes and +splices in exactly the same way. + +## Overview + +### Quotes and Splices + +Symmetric meta programming is built on two well-known fundamental +operations: quotation and splicing. Quotation is expressed as +`'(...)` or `'{...}` for expressions (both forms are equivalent) and +as `'[...]` for types. Splicing is expressed as a prefix `~` operator. + +For example, the code below presents an inline function `assert` +which calls at compile-time a method `assertImpl` with a boolean +expression tree as argument. `assertImpl` evaluates the expression and +prints it again in an error message if it evaluates to `false`. + + import scala.quoted._ + + inline def assert(expr: => Boolean): Unit = + ~ assertImpl(’(expr)) + + def assertImpl(expr: Expr[Boolean]) = + ’{ if !(~expr) then throw new AssertionError(s"failed assertion: ${~expr}") } + + +If `e` is an expression, then `’(e)` or `’{e}` represent the typed +abstract syntax tree representing `e`. If `T` is a type, then `’[T]` +represents the type structure representing `T`. The precise +definitions of "typed abstract syntax tree" or "type-structure" do not +matter for now, the terms are used only to give some +intuition. Conversely, `~ e` evaluates the expression `e`, which must +yield a typed abstract syntax tree or type structure, and embeds the +result as an expression (respectively, type) in the enclosing program. + +Quotations can have spliced parts in them; in this case the embedded +splices are evaluated and embedded as part of the formation of the +quotation. + +Quotes and splices are duals of each other. 
For arbitrary +expressions `e` and types `T` we have: + + ~’(e) = e + ’(~e) = e + ~’[T] = T + ’[~T] = T + +### Types for Quotations + +The type signatures of quotes and splices can be described using +two fundamental types: + + - `Expr[T]`: abstract syntax trees representing expressions of type `T` + - `Type[T]`: type structures representing type `T`. + +Quoting takes expressions of type `T` to expressions of type `Expr[T]` +and it takes types `T` to expressions of type `Type[T]`. Splicing +takes expressions of type `Expr[T]` to expressions of type `T` and it +takes expressions of type `Type[T]` to types `T`. + +The two types can be are defined in package `scala.quoted` as follows: + + package scala.quoted + + abstract class Expr[T] { + def unary_~: T // splice operation + } + class Type[T] { + type unary_~ = T // splice type + } + +### The Phase Consistency Principle + +A fundamental *phase consistency principle* (PCP) regulates accesses +to free variables in quoted and spliced code: + + - _For any free variable reference `x`, the number of quoted scopes and the number of spliced scopes between the reference to `x` and the definition of `x` must be equal_. + +Here, `this`-references count as free variables. On the other +hand, we assume that all imports are fully expanded and that `_root_` is +not a free variable. So references to global definitions are +allowed everywhere. + +The phase consistency principle can be motivated as follows: First, +suppose the result of a program `P` is some quoted text `’{ ... x +... }` that refers to a free variable `x` in `P` This can be +represented only by referring to the original variable `x`. Hence, the +result of the program will need to persist the program state itself as +one of its parts. We don’t want to do this, hence this situation +should be made illegal. Dually, suppose a top-level part of a program +is a spliced text `~{ ... x ... }` that refers to a free variable `x` +in `P`. 
This would mean that we refer during _construction_ of `P` to +a value that is available only during _execution_ of `P`. This is of +course impossible and therefore needs to be ruled out. Now, the +small-step evaluation of a program will reduce quotes and splices in +equal measure using the cancellation rules above. But it will neither +create nor remove quotes or splices individually. So the PCP ensures +that program elaboration will lead to neither of the two unwanted +situations described above. + +In what concerns the range of features it covers, symmetric meta programming is +quite close to the MetaML family of languages. One difference is that MetaML does +not have an equivalent of the PCP - quoted code in MetaML _can_ access +variables in its immediately enclosing environment, with some +restrictions and caveats since such accesses involve serialization. +However, this does not constitute a fundamental gain in +expressiveness. Symmetric meta programming allows to define a `Liftable` +type-class which can implement such accesses within the confines of the +PCP. This is explained further in a later section. + +## Details + +### From `Expr`s to Functions and Back + +The `Expr` companion object contains an "AsFunction" decorator that turns a tree +describing a function into a function mapping trees to trees. + + object Expr { + ... + implicit class AsFunction[T, U](f: Expr[T => U]) extends AnyVal { + def apply(x: Expr[T]): Expr[U] = ??? + } + } + +This decorator gives `Expr` the `apply` operation of an applicative functor, where `Expr`s +over function types can be applied to `Expr` arguments. 
The definition +of `AsFunction(f).apply(x)` is assumed to be functionally the same as +`’((~f)(~x))`, however it should optimize this call by returning the +result of beta-reducing `f(x)` if `f` is a known lambda expression + +The `AsFunction` decorator distributes applications of `Expr` over function +arrows: + + AsFunction(_).apply: Expr[S => T] => (Expr[S] => Expr[T]) + +Its dual, let’s call it `reflect`, can be defined as follows: + + def reflect[T, U](f: Expr[T] => Expr[U]): Expr[T => U] = ’{ + (x: T) => ~f(’(x)) + } + +Note how the fundamental phase consistency principle works in two +different directions here for `f` and `x`. The reference to `f` is +legal because it is quoted, then spliced, whereas the reference to `x` +is legal because it is spliced, then quoted. + +### Types and the PCP + +In principle, The phase consistency principle applies to types as well +as for expressions. This might seem too restrictive. Indeed, the +definition of `reflect` above is not phase correct since there is a +quote but no splice between the parameter binding of `T` and its +usage. But the code can be made phase correct by adding a binding +of a `Type[T]` tag: + + def reflect[T, U](f: Expr[T] => Expr[U]): Expr[T => U] = { + val Ttag = new Type[T] + ’{ (x: ~Ttag) => ~f(’(x)) + } + +To avoid clutter, the Scala implementation will add these tags +automatically in the case of a PCP violation involving types. As a consequence, +types can be effectively ignored for phase consistency checking. + +### Example Expansion + +Assume an `Array` class with an inline `map` method that forwards to a macro implementation. 
+ + class Array[T] { + inline def map[U](f: T => U): Array[U] = ~ Macros.mapImpl[T, U](’[U], ’(this), ’(f)) + } + +Here’s the definition of the `mapImpl` macro, which takes quoted types and expressions to a quoted expression: + + object Macros { + + def mapImpl[T, U](u: Type[U], arr: Expr[Array[T]], op: Expr[T => U]): Expr[Array[U]] = ’{ + var i = 0 + val xs = ~arr + var len = xs.length + val ys = new Array[~u] + while (i < len) { + ys(i) = ~op(’(xs(i))) + i += 1 + } + ys + } + } + +Here’s an application of `map` and how it rewrites to optimized code: + + genSeq[Int]().map(x => x + 1) + +==> (inline) + + val $this: Seq[Int] = genSeq[Int]() + val f: Int => Int = x => x * x + ~ _root_.Macros.mapImpl[Int, Int](’[Int], ’($this), ’(f)) + +==> (splice) + + val $this: Seq[Int] = genSeq[Int]() + val f: Int => Int = x => x * x + + { + var i = 0 + val xs = ~’($this) + var len = xs.length + val ys = new Array[~’[Int]] + while (i < len) { + ys(i) = ~(’(f)(’(xs(i)))) + i += 1 + } + ys + } + +==> (expand and splice inside quotes) + + val $this: Seq[Int] = genSeq[Int]() + val f: Int => Int = x => x * x + + { + var i = 0 + val xs = $this + var len = xs.length + val ys = new Array[Int] + while (i < len) { + ys(i) = xs(i) + 1 + i += 1 + } + ys + } + +==> (elim dead code) + + val $this: Seq[Int] = genSeq[Int]() + + { + var i = 0 + val xs = $this + var len = xs.length + val ys = new Array[Int] + while (i < len) { + ys(i) = xs(i) + 1 + i += 1 + } + ys + } + +### Relationship with Inline and Macros + +Seen by itself, symmetric meta-programming looks more like a +framework for staging than one for compile-time meta programming with +macros. But combined with Dotty’s `inline` it can be turned into a +compile-time system. The idea is that macro elaboration can be +understood as a combination of a macro library and a quoted +program. For instance, here’s the `assert` macro again together with a +program that calls `assert`. 
+ + object Macros { + + inline def assert(expr: => Boolean): Unit = + ~ assertImpl(’(expr)) + + def assertImpl(expr: Expr[Boolean]) = + ’{ if !(~expr) then throw new AssertionError(s"failed assertion: ${~expr}") } + } + + val program = { + val x = 1 + Macros.assert(x != 0) + } + +Inlining the `assert` function would give the following program: + + val program = { + val x = 1 + ~Macros.assertImpl(’(x != 0)) + } + +The example is only phase correct because Macros is a global value and +as such not subject to phase consistency checking. Conceptually that’s +a bit unsatisfactory. If the PCP is so fundamental, it should be +applicable without the global value exception. But in the example as +given this does not hold since both `assert` and `program` call +`assertImpl` with a splice but no quote. + +However, one could argue that the example is really missing +an important aspect: The macro library has to be compiled in a phase +prior to the program using it, but in the code above, macro +and program are defined together. A more accurate view of +macros would be to have the user program be in a phase after the macro +definitions, reflecting the fact that macros have to be defined and +compiled before they are used. Hence, conceptually the program part +should be treated by the compiler as if it was quoted: + + val program = ’{ + val x = 1 + ~Macros.assertImpl(’(x != 0)) + } + +If `program` is treated as a quoted expression, the call to +`Macros.assertImpl` becomes phase correct even if macro library and +program are conceptualized as local definitions. + +But what about the call from `assert` to `assertImpl`? Here, we need a +tweak of the typing rules. An inline function such as `assert` that +contains a splice operation outside an enclosing quote is called a +_macro_. Macros are supposed to be expanded in a subsequent phase, +i.e. in a quoted context. Therefore, they are also type checked as if +they were in a quoted context. 
For instance, the definition of +`assert` is typechecked as if it appeared inside quotes. This makes +the call from `assert` to `assertImpl` phase-correct, even if we +assume that both definitions are local. + +The second role of `inline` in Dotty is to mark a `val` that is +either a constant or is a parameter that will be a constant when instantiated. This +aspect is also important for macro expansion. To illustrate this, +consider an implementation of the `power` function that makes use of a +statically known exponent: + + inline def power(inline n: Int, x: Double) = ~powerCode(n, ’(x)) + + private def powerCode(n: Int, x: Expr[Double]): Expr[Double] = + if (n == 0) ’(1.0) + else if (n == 1) x + else if (n % 2 == 0) ’{ { val y = ~x * ~x; ~powerCode(n / 2, ’(y)) } } + else ’{ ~x * ~powerCode(n - 1, x) } + +The reference to `n` as an argument in `~powerCode(n, ’(x))` is not +phase-consistent, since `n` appears in a splice without an enclosing +quote. Normally that would be a problem because it means that we need +the _value_ of `n` at compile time, which is not available for general +parameters. But since `n` is an inline parameter of a macro, we know +that at the macro’s expansion point `n` will be instantiated to a +constant, so the value of `n` will in fact be known at this +point. To reflect this, we loosen the phase consistency requirements +as follows: + + - If `x` is an inline value (or an inline parameter of an inline +function), it can be accessed in all contexts where the number of +splices minus the number of quotes between use and definition is +either 0 or 1. + +### Relationship with Staging + +The framework expresses at the same time compile-time meta-programming +and staging. The phase in which code is run is determined by the +difference between the number of splice scopes and quote scopes in +which it is embedded. + + - If there are more splices than quotes, the code is run at + "compile-time" i.e. as a macro. 
In the general case, this means + running an interpreter that evaluates the code, which is + represented as a typed abstract syntax tree. The interpreter can + fall back to reflective calls when evaluating an application of a + previously compiled method. If the splice excess is more than one, + it would mean that a macro’s implementation code (as opposed to the + code it expands to) invokes other macros. If macros are realized by + interpretation, this would lead to towers of interpreters, where + the first interpreter would itself interpret an interpreter code + that possibly interprets another interpreter and so on. + + - If the number of splices equals the number of quotes, the code is + compiled and run as usual. + + - If the number of quotes exceeds the number of splices, the code is + staged. That is, it produces a typed abstract syntax tree or type + structure at run-time. A quote excess of more than one corresponds + to multi-staged programming. + +Providing an interpreter for the full language is quite difficult, and +it is even more difficult to make that interpreter run efficiently. So +we currently impose the following restrictions on the use of splices. + + 1. A top-level splice must appear in an inline function (turning that function + into a macro) + + 2. The splice must call a previously compiled method. + + 3. Splices inside splices (but no intervening quotes) are not allowed. + + 4. A macro method is effectively final and it may override no other method. + +The framework as discussed so far allows code to be staged, i.e. be prepared +to be executed at a later stage. To run that code, there is another method +in class `Expr` called `run`. Note that `~` and `run` both map from `Expr[T]` +to `T` but only `~` is subject to the PCP, whereas `run` is just a normal method. + + abstract class Expr[T] { + def unary_~: T + def run: T // run staged code + } + +### Limitations to Splicing + +Quotes and splices are duals as far as the PCP is concerned. 
But there is an additional +restriction that needs to be imposed on splices to guarantee soundness: +code in splices must be free of side effects. The restriction prevents code like this: + + var x: Expr[T] + ’{ (y: T) => ~{ x = ’(y); 1 } } + +This code, if it was accepted, would "extrude" a reference to a quoted variable `y` from its scope. +This means we can subsequently access a variable outside the scope where it is defined, which is +likely problematic. The code is clearly phase consistent, so we cannot use PCP to +rule it out. Instead we postulate a future effect system that can guarantee that splices +are pure. In the absence of such a system we simply demand that spliced expressions are +pure by convention, and allow for undefined compiler behavior if they are not. This is analogous +to the status of pattern guards in Scala, which are also required, but not verified, to be pure. + +There is also a problem with `run` in splices. Consider the following expression: + + ’{ (x: Int) => ~{ {’(x)}.run; 1 } } + +This is again phase correct, but will lead us into trouble. Indeed, evaluating the splice will reduce the +expression `{’(x)}.run` to `x`. But then the result + + ’{ (x: Int) => ~{ x; 1 } } + +is no longer phase correct. To prevent this soundness hole it seems easiest to classify `run` as a side-effecting +operation. It would thus be prevented from appearing in splices. In a base language with side-effects we'd have to +do this anyway: Since `run` runs arbitrary code it can always produce a side effect if the code it runs produces one. + +### The `Liftable` type-class + +Consider the following implementation of a staged interpreter that implements +a compiler through staging. + + import scala.quoted._ + + enum Exp { + case Num(n: Int) + case Plus(e1: Exp, e2: Exp) + case Var(x: String) + case Let(x: String, e: Exp, in: Exp) + } + +The interpreted language consists of numbers `Num`, addition `Plus`, and variables +`Var` which are bound by `Let`. 
Here are two sample expressions in the language: + + val exp = Plus(Plus(Num(2), Var("x")), Num(4)) + val letExp = Let("x", Num(3), exp) + +Here’s a compiler that maps an expression given in the interpreted +language to quoted Scala code of type `Expr[Int]`. +The compiler takes an environment that maps variable names to Scala `Expr`s. + + def compile(e: Exp, env: Map[String, Expr[Int]]): Expr[Int] = e match { + case Num(n) => + n + case Plus(e1, e2) => + ’(~compile(e1, env) + ~compile(e2, env)) + case Var(x) => + env(x) + case Let(x, e, body) => + ’{ val y = ~compile(e, env); ~compile(body, env + (x -> ’(y))) } + } + +Running `compile(letExp, Map())` would yield the following Scala code: + + ’{ val y = 3; (2 + y) + 4 } + +The body of the first clause, `case Num(n) => n`, looks suspicious. `n` +is declared as an `Int`, yet the result of `compile` is declared to be +`Expr[Int]`. Shouldn’t `n` be quoted? In fact this would not +work since replacing `n` by `’n` in the clause would not be phase +correct. + +What happens instead "under the hood" is an implicit conversion: `n` +is expanded to `scala.quoted.Expr.toExpr(n)`. The `toExpr` conversion +is defined in the companion object of class `Expr` as follows: + + object Expr { + implicit def toExpr[T](x: T)(implicit ev: Liftable[T]): Expr[T] = + ev.toExpr(x) + } + +The conversion says that values of types implementing the `Liftable` +type class can be converted ("lifted") automatically to `Expr` +values. Dotty comes with instance definitions of `Liftable` for +several types including all underlying types of literals. For example, +`Int` values can be converted to `Expr[Int]` values by wrapping the +value in a `Literal` tree node. This makes use of the underlying tree +representation in the compiler for efficiency. But the `Liftable` +instances are nevertheless not "magic" in the sense that they could +all be defined in a user program without knowing anything about the +representation of `Expr` trees. 
For instance, here is a possible +instance of `Liftable[Boolean]`: + + implicit def BooleanIsLiftable: Liftable[Boolean] = new { + implicit def toExpr(b: Boolean) = if (b) ’(true) else ’(false) + } + +Once we can lift bits, we can work our way up. For instance, here is a +possible implementation of `Liftable[Int]` that does not use the underlying +tree machinery: + + implicit def IntIsLiftable: Liftable[Int] = new { + def toExpr(n: Int): Expr[Int] = n match { + case Int.MinValue => ’(Int.MinValue) + case _ if n < 0 => ’(-(~toExpr(-n))) + case 0 => ’(0) + case _ if n % 2 == 0 => ’(~toExpr(n / 2) * 2) + case _ => ’(~toExpr(n / 2) * 2 + 1) + } + } + +Since `Liftable` is a type class, its instances can be conditional. For example, +a `List` is liftable if its element type is: + + implicit def ListIsLiftable[T: Liftable]: Liftable[List[T]] = new { + def toExpr(xs: List[T]): Expr[List[T]] = xs match { + case x :: xs1 => ’(~implicitly[Liftable[T]].toExpr(x) :: ~toExpr(xs1)) + case Nil => ’(Nil: List[T]) + } + } + +In the end, `Liftable` resembles very much a serialization +framework. Like the latter it can be derived systematically for all +collections, case classes and enums. + +## Implementation + +### Syntax changes + +A splice `~e` on an expression of type `Expr[T]` is a normal prefix +operator. To make it work as a type operator on `Type[T]` as well, we +need a syntax change that introduces prefix operators as types. + + SimpleType ::= ... + [‘-’ | ‘+’ | ‘~’ | ‘!’] StableId + +Analogously to the situation with expressions, a prefix type operator +such as `~ e` is treated as a shorthand for the type `e.unary_~`. + +Quotes are supported by introducing new tokens `’(`, `’{`, and `’[` +and adding quoted variants `’(...)`, `’{...}` and `’[...]` to the +`SimpleExpr` productions. + + SimpleExpr ::= ... 
+ | ‘’{’ BlockExprContents ‘}’ + | ‘’’ ‘(’ ExprsInParens ‘)’ + | ‘’’ ‘[’ Type ‘]’ + +Syntax changes are given relative to the [Dotty reference +grammar](../internal/syntax.md). + +An alternative syntax would treat `’` as a separate operator. This +would be attractive since it enables quoting single identifiers as +e.g. `’x` instead of `’(x)`. But it would clash with symbol +literals. So it could be done only if symbol literals were abolished. + +### Implementation in `dotc` + +Quotes and splices are primitive forms in the generated abstract +syntax trees. They are eliminated in an expansion phase +`ReifyQuotes`. This phase runs after typing and pickling. + +Macro-expansion works outside-in. If the outermost scope is a splice, +the spliced AST will be evaluated in an interpreter. A call to a +previously compiled method can be implemented as a reflective call to +that method. With the restrictions on splices that are currently in +place that’s all that’s needed. We might allow more interpretation in +splices in the future, which would allow us to loosen the +restriction. Quotes in spliced, interpreted code are kept as they +are, after splices nested in the quotes are expanded. + +If the outermost scope is a quote, we need to generate code that +constructs the quoted tree at run-time. We implement this by +serializing the tree as a Tasty structure, which is stored +in a string literal. At runtime, an unpickler method is called to +deserialize the string into a tree. + +Splices inside quoted code insert the spliced tree as is, after +expanding any quotes in the spliced code recursively. + +## Formalization + +The phase consistency principle can be formalized in a calculus that +extends simply-typed lambda calculus with quotes and splices. 
+ +### Syntax + +The syntax of terms, values, and types is given as follows: + + Terms t ::= x variable + (x: T) => t lambda + t t application + ’t quote + ~t splice + + Values v ::= (x: T) => t lambda + ’q pure quote + + Quoted q ::= x | (x: T) => q | q q | ’t + + Types T ::= A base type + T -> T function type + expr T quoted + +Typing rules are formulated using a stack of environments +`Es`. Individual environments `E` consist as usual of variable +bindings `x: T`. Environments can be combined using the two +combinators `’` and `~`. + + Environment E ::= () empty + E, x: T + + Env. stack Es ::= () empty + E simple + Es * Es combined + + Separator * ::= ’ + ~ + +The two environment combinators are both associative with left and +right identity `()`. + +### Operational semantics: + +We define a small step reduction relation `-->` with the following rules: + + ((x: T) => t) v --> [x := v]t + + ~(’t) --> t + + t1 --> t2 + ----------------- + e[t1] --> e[t2] + +The first rule is standard call-by-value beta-reduction. The second +rule says that splice and quotes cancel each other out. The third rule +is a context rule; it says that reduction is allowed in the hole `[ ]` +position of an evaluation context. Evaluation contexts `e` and +splice evaluation context `e_s` are defined syntactically as follows: + + Eval context e ::= [ ] | e t | v e | ’e_s[~e] + Splice context e_s ::= [ ] | (x: T) => e_s | e_s t | q e_s + +### Typing rules + +Typing judgments are of the form `Es |- t: T`. There are two +substructural rules which express the fact that quotes and splices +cancel each other out: + + Es1 * Es2 |- t: T + --------------------------- + Es1 ~ E1 ’ E2 * Es2 |- t: T + + + Es1 * Es2 |- t: T + --------------------------- + Es1 ’ E1 ~ E2 * Es2 |- t: T + +The lambda calculus fragment of the rules is standard, except that we +use a stack of environments. The rules only interact with the topmost +environment of the stack. 
+ + x: T in E + -------------- + Es * E |- x: T + + + Es * E, x: T1 |- t: T2 + ------------------------------- + Es * E |- (x: T1) => t: T1 -> T2 + + + Es |- t1: T2 -> T Es |- t2: T2 + --------------------------------- + Es |- t1 t2: T + +The rules for quotes and splices map between `expr T` and `T` by trading `’` and `~` between +environments and terms. + + Es ~ () |- t: expr T + -------------------- + Es |- ~t: T + + + Es ’ () |- t: T + ---------------- + Es |- ’t: expr T + +## Going Further + +The meta-programming framework as presented and currently implemented is quite restrictive +in that it does not allow for the inspection of quoted expressions and +types. It’s possible to work around this by providing all necessary +information as normal, unquoted inline parameters. But we would gain +more flexibility by allowing for the inspection of quoted code with +pattern matching. This opens new possibilities. For instance, here is a +version of `power` that generates the multiplications directly if the +exponent is statically known and falls back to the dynamic +implementation of power otherwise. + + inline def power(n: Int, x: Double): Double = ~{ + ’(n) match { + case Constant(n1) => powerCode(n1, ’(x)) + case _ => ’{ dynamicPower(n, x) } + } + } + + private def dynamicPower(n: Int, x: Double): Double = + if (n == 0) 1.0 + else if (n % 2 == 0) dynamicPower(n / 2, x * x) + else x * dynamicPower(n - 1, x) + +This assumes a `Constant` extractor that maps tree nodes representing +constants to their values. 
+ +With the right extractors the "AsFunction" operation +that maps expressions over functions to functions over expressions can +be implemented in user code: + + implicit class AsFunction[T, U](f: Expr[T => U]) extends AnyVal { + def apply(x: Expr[T]): Expr[U] = + f match { + case Lambda(g) => g(x) + case _ => ’((~f)(~x)) + } + } + +This assumes an extractor + + object Lambda { + def unapply[T, U](x: Expr[T => U]): Option[Expr[T] => Expr[U]] + } + +Once we allow inspection of code via extractors, it’s tempting to also +add constructors that create typed trees directly without going +through quotes. Most likely, those constructors would work over `Expr` +types which lack a known type argument. For instance, an `Apply` +constructor could be typed as follows: + + def Apply(fn: Expr[_], args: List[Expr[_]]): Expr[_] + +This would allow constructing applications from lists of arguments +without having to match the arguments one-by-one with the +corresponding formal parameter types of the function. We then need "at +the end" a method to convert an `Expr[_]` to an `Expr[T]` where `T` is +given from the outside. E.g. if `code` yields an `Expr[_]`, then +`code.atType[T]` yields an `Expr[T]`. The `atType` method has to be +implemented as a primitive; it would check that the computed type +structure of `Expr` is a subtype of the type structure representing +`T`. + +Before going down that route, we should evaluate in detail the tradeoffs it +presents. Constructing trees that are only verified _a posteriori_ +to be type correct loses a lot of guidance for constructing the right +trees. So we should wait with this addition until we have more +use-cases that help us decide whether the loss in type-safety is worth +the gain in flexibility. In this context, it seems that deconstructing types is +less error-prone than deconstructing terms, so one might also +envisage a solution that allows the former but not the latter. 
+ +## Conclusion + +Meta-programming has a reputation of being difficult and confusing. +But with explicit `Expr/Type` types and quotes and splices it can become +downright pleasant. A simple strategy first defines the underlying quoted or unquoted +values using `Expr` and `Type` and then inserts quotes and splices to make the types +line up. Phase consistency is at the same time a great guideline +where to insert a splice or a quote and a vital sanity check that +the result makes sense. diff --git a/docs/sidebar.yml b/docs/sidebar.yml index 09228fc8d360..baca398e3e89 100644 --- a/docs/sidebar.yml +++ b/docs/sidebar.yml @@ -43,10 +43,12 @@ sidebar: subsection: - title: Multiversal Equality url: docs/reference/multiversal-equality.html - - title: Inline - url: docs/reference/inline.html - title: Trait Parameters url: docs/reference/trait-parameters.html + - title: Inline + url: docs/reference/inline.html + - title: Symmetric Meta Programming + url: docs/reference/symmetric-meta-programming.html - title: By-Name Implicits url: docs/reference/implicit-by-name-parameters.html - title: Auto Parameter Tupling diff --git a/library/src/scala/quoted/Expr.scala b/library/src/scala/quoted/Expr.scala new file mode 100644 index 000000000000..ef6652782dc7 --- /dev/null +++ b/library/src/scala/quoted/Expr.scala @@ -0,0 +1,15 @@ +package scala.quoted + +class Expr[T] extends Quoted { + def unary_~ : T = ??? + def run: T = ??? +} + +object Expr { + implicit def toExpr[T](x: T)(implicit ev: Liftable[T]): Expr[T] = + ev.toExpr(x) + + implicit class AsFunction[T, U](private val f: Expr[T => U]) extends AnyVal { + def apply(x: Expr[T]): Expr[U] = ??? 
+ } +} diff --git a/library/src/scala/quoted/Liftable.scala b/library/src/scala/quoted/Liftable.scala new file mode 100644 index 000000000000..9e64a7c1ed8e --- /dev/null +++ b/library/src/scala/quoted/Liftable.scala @@ -0,0 +1,18 @@ +package scala.quoted + +/** A typeclass for types that can be turned to `quoted.Expr[T]` + * without going through an explicit `'(...)` operation. + */ +abstract class Liftable[T] { + implicit def toExpr(x: T): Expr[T] +} + +/** Some liftable base types. To be completed with at least all types + * that are valid Scala literals. The actual implementation of these + * typed could be in terms of `ast.tpd.Literal`; the test `quotable.scala` + * gives an alternative implementation using just the basic staging system. + */ +object Liftable { + implicit def IntIsLiftable: Liftable[Int] = ??? + implicit def BooleanIsLiftable: Liftable[Boolean] = ??? +} diff --git a/library/src/scala/quoted/Quoted.scala b/library/src/scala/quoted/Quoted.scala new file mode 100644 index 000000000000..b425c0bfb874 --- /dev/null +++ b/library/src/scala/quoted/Quoted.scala @@ -0,0 +1,6 @@ +package scala.quoted + +/** Common superclass of Expr and Type */ +class Quoted + + diff --git a/library/src/scala/quoted/Type.scala b/library/src/scala/quoted/Type.scala new file mode 100644 index 000000000000..ad22a6da90e0 --- /dev/null +++ b/library/src/scala/quoted/Type.scala @@ -0,0 +1,11 @@ +package scala.quoted + +class Type[T] extends Quoted { + type unary_~ = T +} + +/** Some basic type tags, currently incomplete */ +object Type { + implicit def IntTag: Type[Int] = new Type[Int] + implicit def BooleanTag: Type[Boolean] = new Type[Boolean] +} diff --git a/library/src/scala/runtime/quoted/Unpickler.scala b/library/src/scala/runtime/quoted/Unpickler.scala new file mode 100644 index 000000000000..2073f7516f0c --- /dev/null +++ b/library/src/scala/runtime/quoted/Unpickler.scala @@ -0,0 +1,22 @@ +package scala.runtime.quoted + +import scala.quoted._ + +/** Provides methods to 
unpickle `Expr` and `Type` trees. */ +object Unpickler { + + /** Representation of pickled trees. For now it's String, but it + * should be changed to some kind of TASTY bundle. + */ + type Pickled = String + + /** Unpickle `repr` which represents a pickled `Expr` tree, + * replacing splice nodes with `args` + */ + def unpickleExpr[T](repr: Pickled, args: Seq[Any]): Expr[T] = ??? + + /** Unpickle `repr` which represents a pickled `Type` tree, + * replacing splice nodes with `args` + */ + def unpickleType[T](repr: Pickled, args: Seq[Any]): Type[T] = ??? +} diff --git a/tests/neg/inlinevals.scala b/tests/neg/inlinevals.scala index 184aa2168772..d7d45e248a74 100644 --- a/tests/neg/inlinevals.scala +++ b/tests/neg/inlinevals.scala @@ -17,6 +17,12 @@ object Test { inline val M = X // error: rhs must be constant expression + inline val xs = List(1, 2, 3) // error: must be a constant expression + + def f(inline xs: List[Int]) = xs + + f(List(1, 2, 3)) // error: must be a constant expression + def byname(inline f: => String): Int = ??? 
// ok byname("hello" ++ " world") diff --git a/tests/neg/quoteTest.scala b/tests/neg/quoteTest.scala new file mode 100644 index 000000000000..37e2fe02c141 --- /dev/null +++ b/tests/neg/quoteTest.scala @@ -0,0 +1,21 @@ +import scala.quoted._ + +class Test { + + val x: Int = 0 + + '{ '(x + 1) // error: wrong staging level + + '((y: Expr[Int]) => ~y ) // error: wrong staging level + + } + + '(x + 1) // error: wrong staging level + + '((y: Expr[Int]) => ~y ) // error: wrong staging level + + def f[T](t: Type[T], x: Expr[T]) = '{ + val z2 = ~x // OK + } + +} diff --git a/tests/neg/quotedMacroOverride.scala b/tests/neg/quotedMacroOverride.scala new file mode 100644 index 000000000000..56a14fe304c8 --- /dev/null +++ b/tests/neg/quotedMacroOverride.scala @@ -0,0 +1,13 @@ +object Test { + + abstract class A { + def f(): Unit + inline def g(): Unit = () + } + + class B extends A { + inline def f() = ~('()) // error: may not override + override def g() = () // error: may not override + } + +} diff --git a/tests/pending/pos/quotedSepComp/Macro_1.scala b/tests/pending/pos/quotedSepComp/Macro_1.scala new file mode 100644 index 000000000000..205f63c937d8 --- /dev/null +++ b/tests/pending/pos/quotedSepComp/Macro_1.scala @@ -0,0 +1,5 @@ +import scala.quoted._ +object Macros { + inline def assert(expr: => Boolean): Unit = ~ assertImpl('(expr)) + def assertImpl(expr: Expr[Boolean]) = '{ () } +} diff --git a/tests/pending/pos/quotedSepComp/Test_2.scala b/tests/pending/pos/quotedSepComp/Test_2.scala new file mode 100644 index 000000000000..42a3748830a0 --- /dev/null +++ b/tests/pending/pos/quotedSepComp/Test_2.scala @@ -0,0 +1,5 @@ +class Test { + import Macros._ + val x = 1 + assert(x != 0) +} diff --git a/tests/pos-from-tasty/i3596.scala b/tests/pos-from-tasty/i3596.scala new file mode 100644 index 000000000000..6f122631779f --- /dev/null +++ b/tests/pos-from-tasty/i3596.scala @@ -0,0 +1,2 @@ +class Bar(ctor : Int => Int) +class Foo extends Bar(x => x) diff --git 
a/tests/pos-from-tasty/i3597.scala b/tests/pos-from-tasty/i3597.scala new file mode 100644 index 000000000000..04e33975ea2c --- /dev/null +++ b/tests/pos-from-tasty/i3597.scala @@ -0,0 +1,3 @@ +object Test { + def bar(inline n: Int) = n +} diff --git a/tests/pos/liftable.scala b/tests/pos/liftable.scala new file mode 100644 index 000000000000..02023994d9e7 --- /dev/null +++ b/tests/pos/liftable.scala @@ -0,0 +1,28 @@ +import scala.quoted._ + +object Test { + + implicit def IntIsLiftable: Liftable[Int] = new { + def toExpr(n: Int): Expr[Int] = n match { + case Int.MinValue => '(Int.MinValue) + case _ if n < 0 => '(-(~toExpr(n))) + case 0 => '(0) + case _ if n % 2 == 0 => '( ~toExpr(n / 2) * 2) + case _ => '( ~toExpr(n / 2) * 2 + 1) + } + } + + implicit def BooleanIsLiftable: Liftable[Boolean] = new { + implicit def toExpr(b: Boolean) = + if (b) '(true) else '(false) + } + + implicit def ListIsLiftable[T: Liftable]: Liftable[List[T]] = new { + def toExpr(xs: List[T]): Expr[List[T]] = xs match { + case x :: xs1 => '{ ~implicitly[Liftable[T]].toExpr(x) :: ~toExpr(xs1) } + case Nil => '(Nil: List[T]) + } + } + + val xs: Expr[List[Int]] = 1 :: 2 :: 3 :: Nil +} diff --git a/tests/pos/quoteTest.scala b/tests/pos/quoteTest.scala new file mode 100644 index 000000000000..e3671a2ea3e3 --- /dev/null +++ b/tests/pos/quoteTest.scala @@ -0,0 +1,16 @@ +import scala.quoted._ + +object Test { + + def f[T](x: Expr[T])(t: Type[T]) = '{ + val y: t.unary_~ = x.unary_~ + val z = ~x + } + + f('(2))('[Int]) + f('{ true })('[Boolean]) + + def g(es: Expr[String], t: Type[String]) = + f('{ (~es + "!") :: Nil })('[List[~t]]) +} + diff --git a/tests/pos/quoted.scala b/tests/pos/quoted.scala new file mode 100644 index 000000000000..3155c353ca33 --- /dev/null +++ b/tests/pos/quoted.scala @@ -0,0 +1,38 @@ +import scala.quoted._ + +class Test { + + object Macros { + + inline def assert(expr: => Boolean): Unit = + ~ assertImpl('(expr)) + + def assertImpl(expr: Expr[Boolean]) = + '{ if !(~expr) then 
throw new AssertionError(s"failed assertion: ${~expr}") } + + inline def power(inline n: Int, x: Double) = ~powerCode(n, '(x)) + + def powerCode(n: Int, x: Expr[Double]): Expr[Double] = + if (n == 0) '(1.0) + else if (n == 1) x + else if (n % 2 == 0) '{ { val y = ~x * ~x; ~powerCode(n / 2, '(y)) } } + else '{ ~x * ~powerCode(n - 1, x) } + } + + val program = '{ + import Macros._ + + val x = 1 + assert(x != 0) + + ~assertImpl('(x != 0)) + + val y = math.sqrt(2.0) + + power(3, y) + + ~powerCode(3, '{math.sqrt(2.0)}) + } + + program.run +} diff --git a/tests/pos/spliceTest.scala b/tests/pos/spliceTest.scala new file mode 100644 index 000000000000..c04afdcecb57 --- /dev/null +++ b/tests/pos/spliceTest.scala @@ -0,0 +1,14 @@ +class Expr[T] { + def unary_~ : T = ??? +} +class Type[T] { + type unary_~ = T +} +object Test { + + def f[T](t: Type[T], x: Expr[T]) = { + val y: t.unary_~ = x.unary_~ + val z: ~t = ~x + } + +} diff --git a/tests/pos/stagedInterpreter.scala b/tests/pos/stagedInterpreter.scala new file mode 100644 index 000000000000..2ba57be2b004 --- /dev/null +++ b/tests/pos/stagedInterpreter.scala @@ -0,0 +1,35 @@ +import scala.quoted._ + +enum Exp { + case Num(n: Int) + case Plus(e1: Exp, e2: Exp) + case Var(x: String) + case Let(x: String, e: Exp, in: Exp) +} + +object Test { + import Exp._ + + val keepLets = true + + val exp = Plus(Plus(Num(2), Var("x")), Num(4)) + + val letExp = Let("x", Num(3), exp) + + def compile(e: Exp, env: Map[String, Expr[Int]]): Expr[Int] = e match { + case Num(n) => n + case Plus(e1, e2) => '(~compile(e1, env) + ~compile(e2, env)) + case Var(x) => env(x) + case Let(x, e, body) => + if (keepLets) + '{ val y = ~compile(e, env); ~compile(body, env + (x -> '(y))) } + else + compile(body, env + (x -> compile(e, env))) + } + + val res1 = '{ (x: Int) => ~compile(exp, Map("x" -> '(x))) } + + val res2 = compile(letExp, Map()) + + res1.run +} diff --git a/tests/run/i3006b.check b/tests/run/i3006b.check index b62605500d51..3698829ef115 100644 
--- a/tests/run/i3006b.check +++ b/tests/run/i3006b.check @@ -1,3 +1,3 @@ -Foo$$_$bar$1 -Foo$$_$bar$2 -Bar$$_$bar$1 +bar$1 +bar$2 +bar$1