diff --git a/common/src/main/scala/scalan/package.scala b/common/src/main/scala/scalan/package.scala index ab705c62e8..f89cc78d53 100644 --- a/common/src/main/scala/scalan/package.scala +++ b/common/src/main/scala/scalan/package.scala @@ -5,5 +5,19 @@ package object scalan { /** Allows implicit resolution to find appropriate instance of ClassTag in * the scope where RType is implicitly available. */ implicit def rtypeToClassTag[A](implicit t: RType[A]): ClassTag[A] = t.classTag - + + /** Immutable empty array of integers, should be used instead of allocating new empty arrays. */ + val EmptyArrayOfInt = Array.empty[Int] + + /** Immutable empty Seq[Int] backed by empty array. + * You should prefer using it instead of `Seq[Int]()` or `Seq.empty[Int]` + */ + val EmptySeqOfInt: Seq[Int] = EmptyArrayOfInt + + /** Create a new empty buffer around pre-allocated empty array. + * This method is preferred, rather that creating empty debox.Buffer directly + * because it allows to avoid allocation of the empty array. 
+ */ + def emptyDBufferOfInt: debox.Buffer[Int] = debox.Buffer.unsafe(EmptyArrayOfInt) + } diff --git a/common/src/main/scala/scalan/util/CollectionUtil.scala b/common/src/main/scala/scalan/util/CollectionUtil.scala index bba14c516f..db16c17088 100644 --- a/common/src/main/scala/scalan/util/CollectionUtil.scala +++ b/common/src/main/scala/scalan/util/CollectionUtil.scala @@ -61,6 +61,7 @@ object CollectionUtil { res.toMap } + // TODO optimize: using cfor and avoiding allocations def joinSeqs[O, I, K](outer: GenIterable[O], inner: GenIterable[I])(outKey: O=>K, inKey: I=>K): GenIterable[(O,I)] = { val kvs = createMultiMap(inner.map(i => (inKey(i), i))) val res = outer.flatMap(o => { diff --git a/core/src/main/scala/scalan/Base.scala b/core/src/main/scala/scalan/Base.scala index 75b0b0a5de..d151bbeb0c 100644 --- a/core/src/main/scala/scalan/Base.scala +++ b/core/src/main/scala/scalan/Base.scala @@ -9,6 +9,7 @@ import scalan.compilation.GraphVizConfig import scalan.util.StringUtil import debox.{Buffer => DBuffer} import spire.syntax.all.cfor +import scala.collection.mutable /** * The Base trait houses common AST nodes. It also manages a list of encountered definitions which @@ -92,7 +93,7 @@ abstract class Base { scalan: Scalan => _elements(i + 1) = element Def.extractSyms(element, symsBuf) } - _syms = symsBuf.toArray() + _syms = if (symsBuf.length > 0) symsBuf.toArray() else EmptyArrayOfSym } /** References to other nodes in this Def instance. @@ -236,7 +237,7 @@ abstract class Base { scalan: Scalan => override def mirror(t: Transformer): Ref[T] = self } - /** Describes lifting data values of type ST (Source Type) to IR nodes of the correspoding staged type T. + /** Describes lifting data values of type ST (Source Type) to IR nodes of the corresponding staged type T. * In general T is different type obtained by virtualization procedure from ST. * However ST can be the same as T as is the case for Byte, Int, String etc. 
*/ @@ -452,22 +453,28 @@ abstract class Base { scalan: Scalan => /** Transform a sequence of nodes into new sequence of nodes. */ final def apply[A](xs: Seq[Ref[A]]): Seq[Ref[A]] = { val len = xs.length - val res = new Array[Ref[A]](len) - cfor(0)(_ < len, _ + 1) { i => - res(i) = apply(xs(i)) + if (len == 0) EmptySeqOfSym.asInstanceOf[Seq[Ref[A]]] + else { + val res = new Array[Ref[A]](len) + cfor(0)(_ < len, _ + 1) { i => + res(i) = apply(xs(i)) + } + res } - res } /** Apply this transformer to the nodes present in the sequence, * and leave non-Ref items unchanged. */ final def apply(xs: Seq[Any])(implicit o: Overloaded1): Seq[Any] = { val len = xs.length - val res = new Array[Any](len) - cfor(0)(_ < len, _ + 1) { i => - val x = xs(i) match { case s: Ref[_] => apply(s); case s => s } - res(i) = x + if (len == 0) mutable.WrappedArray.empty + else { + val res = new Array[Any](len) + cfor(0)(_ < len, _ + 1) { i => + val x = xs(i) match { case s: Ref[_] => apply(s); case s => s } + res(i) = x + } + res } - res } def +[A](key: Sym, value: Sym): Transformer @@ -815,5 +822,23 @@ abstract class Base { scalan: Scalan => } while (res != currSym) res } -} + /** Immutable empty array of symbols, can be used to avoid unnecessary allocations. */ + val EmptyArrayOfSym = Array.empty[Sym] + + /** Immutable empty Seq, can be used to avoid unnecessary allocations. */ + val EmptySeqOfSym: Seq[Sym] = EmptyArrayOfSym + + /** Create a new empty buffer around pre-allocated empty array. + * This method is preferred, rather that creating empty debox.Buffer directly + * because it allows to avoid allocation of the empty array. + */ + @inline final def emptyDBufferOfSym: DBuffer[Sym] = DBuffer.unsafe(EmptyArrayOfSym) + + /** Used internally in IR and should be used with care since it is mutable. + * At the same time, it is used in the hotspot and allows to avoid roughly tens of + * thousands of allocations per second. + * WARNING: Mutations of this instance can lead to undefined behavior. 
+ */ + protected val EmptyDSetOfInt: debox.Set[Int] = debox.Set.empty +} diff --git a/core/src/main/scala/scalan/Scalan.scala b/core/src/main/scala/scalan/Scalan.scala index 9ce1599469..c2e9359261 100644 --- a/core/src/main/scala/scalan/Scalan.scala +++ b/core/src/main/scala/scalan/Scalan.scala @@ -1,8 +1,7 @@ package scalan -import scalan.compilation.GraphVizExport import scalan.primitives._ -import scalan.staged.{Transforming} +import scalan.staged.Transforming /** Aggregate cake with all inter-dependent modules assembled together. * Each instance of this class contains independent IR context, thus many @@ -30,7 +29,6 @@ class Scalan with Functions with IfThenElse with Transforming -// with GraphVizExport with Thunks with Entities with Modules diff --git a/core/src/main/scala/scalan/TypeDescs.scala b/core/src/main/scala/scalan/TypeDescs.scala index 9b669e3311..b63d49c2bb 100644 --- a/core/src/main/scala/scalan/TypeDescs.scala +++ b/core/src/main/scala/scalan/TypeDescs.scala @@ -81,7 +81,7 @@ abstract class TypeDescs extends Base { self: Scalan => // } // } - // TODO optimize performance hot spot (45% of invokeUnlifted time) + // TODO optimize performance hot spot (45% of invokeUnlifted time), reduce allocation of Some final def getSourceValues(dataEnv: DataEnv, forWrapper: Boolean, stagedValues: AnyRef*): Seq[AnyRef] = { import OverloadHack._ val limit = stagedValues.length @@ -151,7 +151,7 @@ abstract class TypeDescs extends Base { self: Scalan => !!!(s"Cannot get Liftable instance for $this") final lazy val sourceType: RType[_] = liftable.sourceType - protected def collectMethods: Map[Method, MethodDesc] = Map() + protected def collectMethods: Map[Method, MethodDesc] = Map() // TODO optimize: all implementations protected lazy val methods: Map[Method, MethodDesc] = collectMethods // TODO benchamrk against the version below it @@ -244,6 +244,7 @@ abstract class TypeDescs extends Base { self: Scalan => m.getName } + // TODO optimize /** Build a mapping between 
methods of staged class and the corresponding methods of source class. * The methods are related using names. * The computed mapping can be used to project MethodCalls IR nodes back to the corresponding diff --git a/core/src/main/scala/scalan/primitives/Equal.scala b/core/src/main/scala/scalan/primitives/Equal.scala index 32dc525f89..e2a35acf15 100644 --- a/core/src/main/scala/scalan/primitives/Equal.scala +++ b/core/src/main/scala/scalan/primitives/Equal.scala @@ -4,10 +4,14 @@ import scalan.{Base, Scalan} trait Equal extends Base { self: Scalan => /** Binary operation representing structural equality between arguments. */ - case class Equals[A: Elem]() extends BinOp[A, Boolean]("==", equalValues[A](_, _)) + case class Equals[A: Elem]() extends BinOp[A, Boolean]("==") { + override def applySeq(x: A, y: A): Boolean = equalValues[A](x, y) + } /** Binary operation representing structural inequality between arguments. */ - case class NotEquals[A: Elem]() extends BinOp[A, Boolean]("!=", !equalValues[A](_, _)) + case class NotEquals[A: Elem]() extends BinOp[A, Boolean]("!=") { + override def applySeq(x: A, y: A): Boolean = !equalValues[A](x, y) + } protected def equalValues[A](x: Any, y: Any)(implicit eA: Elem[A]) = x == y diff --git a/core/src/main/scala/scalan/primitives/Functions.scala b/core/src/main/scala/scalan/primitives/Functions.scala index 1fc9844344..0acad6ca81 100644 --- a/core/src/main/scala/scalan/primitives/Functions.scala +++ b/core/src/main/scala/scalan/primitives/Functions.scala @@ -4,7 +4,7 @@ import java.util import scalan.staged.ProgramGraphs import scalan.util.GraphUtil -import scalan.{Lazy, Base, Nullable, Scalan} +import scalan.{Nullable, emptyDBufferOfInt, Base, Lazy, Scalan} import debox.{Buffer => DBuffer} import scala.language.implicitConversions @@ -120,7 +120,7 @@ trait Functions extends Base with ProgramGraphs { self: Scalan => override lazy val scheduleIds: DBuffer[Int] = { val sch = if (isIdentity) - DBuffer.ofSize[Int](0) + 
emptyDBufferOfInt else { // graph g will contain all Defs reified as part of this Lambda, (due to `filterNode`) // BUT not all of them depend on boundVars, thus we need to filter them out @@ -165,15 +165,6 @@ trait Functions extends Base with ProgramGraphs { self: Scalan => } override protected def getDeps: Array[Sym] = freeVars.toArray - - def isGlobalLambda: Boolean = { - freeVars.forall { x => - x.isConst || { - val xIsGlobalLambda = x.isLambda && { val lam = x.node.asInstanceOf[Lambda[_, _]]; lam.isGlobalLambda } - xIsGlobalLambda - } - } - } } type LambdaData[A,B] = (Lambda[A,B], Nullable[Ref[A] => Ref[B]], Ref[A], Ref[B]) diff --git a/core/src/main/scala/scalan/primitives/LogicalOps.scala b/core/src/main/scala/scalan/primitives/LogicalOps.scala index fd71800d95..e81b546139 100644 --- a/core/src/main/scala/scalan/primitives/LogicalOps.scala +++ b/core/src/main/scala/scalan/primitives/LogicalOps.scala @@ -2,17 +2,34 @@ package scalan.primitives import scalan.{Base, Scalan} +/** Slice in Scala cake with definitions of logical operations. */ trait LogicalOps extends Base { self: Scalan => - val And = new EndoBinOp[Boolean]("&&", _ && _) + /** Logical AND binary operation. */ + val And = new EndoBinOp[Boolean]("&&") { + override def applySeq(x: Boolean, y: Boolean): Boolean = x && y + } - val Or = new EndoBinOp[Boolean]("||", _ || _) + /** Logical OR binary operation. */ + val Or = new EndoBinOp[Boolean]("||") { + override def applySeq(x: Boolean, y: Boolean): Boolean = x || y + } - val Not = new EndoUnOp[Boolean]("!", !_) + /** Logical NOT unary operation. */ + val Not = new EndoUnOp[Boolean]("!") { + override def applySeq(x: Boolean): Boolean = !x + } - val BinaryXorOp = new EndoBinOp[Boolean]("^", _ ^ _) + /** Logical XOR binary operation. 
*/ + val BinaryXorOp = new EndoBinOp[Boolean]("^") { + override def applySeq(x: Boolean, y: Boolean): Boolean = x ^ y + } - val BooleanToInt = new UnOp[Boolean, Int]("ToInt", if (_) 1 else 0) + /** Boolean to Int conversion unary operation. */ + val BooleanToInt = new UnOp[Boolean, Int]("ToInt") { + override def applySeq(x: Boolean): Int = if (x) 1 else 0 + } + /** Extension methods over `Ref[Boolean]`. */ implicit class RepBooleanOps(value: Ref[Boolean]) { def &&(y: Ref[Boolean]): Ref[Boolean] = And(value, y) def ||(y: Ref[Boolean]): Ref[Boolean] = Or(value, y) @@ -26,6 +43,7 @@ trait LogicalOps extends Base { self: Scalan => } + /** Helper method which defines rewriting rules with boolean constants. */ @inline final def rewriteBoolConsts(lhs: Sym, rhs: Sym, ifTrue: Sym => Sym, ifFalse: Sym => Sym, ifEqual: Sym => Sym, ifNegated: Sym => Sym): Sym = lhs match { diff --git a/core/src/main/scala/scalan/primitives/NumericOps.scala b/core/src/main/scala/scalan/primitives/NumericOps.scala index 70216627d4..deca140dc4 100644 --- a/core/src/main/scala/scalan/primitives/NumericOps.scala +++ b/core/src/main/scala/scalan/primitives/NumericOps.scala @@ -2,7 +2,10 @@ package scalan.primitives import scalan.{ExactNumeric, Base, Scalan, ExactIntegral} +/** Slice in Scala cake with definitions of numeric operations. */ trait NumericOps extends Base { self: Scalan => + + /** Extension methods over `Ref[T]` where T is instance of ExactNumeric type-class. */ implicit class NumericOpsCls[T](x: Ref[T])(implicit val n: ExactNumeric[T]) { def +(y: Ref[T]): Ref[T] = NumericPlus(n)(x.elem).apply(x, y) def -(y: Ref[T]): Ref[T] = NumericMinus(n)(x.elem).apply(x, y) @@ -15,6 +18,7 @@ trait NumericOps extends Base { self: Scalan => def toLong: Ref[Long] = NumericToLong(n).apply(x) } + /** Extension methods over `Ref[T]` where T is instance of ExactIntegral type-class. 
*/ implicit class IntegralOpsCls[T](x: Ref[T])(implicit i: ExactIntegral[T]) { def div(y: Ref[T]): Ref[T] = IntegralDivide(i)(x.elem).apply(x, y) def mod(y: Ref[T]): Ref[T] = IntegralMod(i)(x.elem).apply(x, y) @@ -23,35 +27,80 @@ trait NumericOps extends Base { self: Scalan => def %(y: Ref[T]): Ref[T] = mod(y) } + /** Return an ExactNumeric for a given type T. */ def numeric[T:ExactNumeric]: ExactNumeric[T] = implicitly[ExactNumeric[T]] + + /** Return an ExactIntegral for a given type T. */ def integral[T:ExactIntegral]: ExactIntegral[T] = implicitly[ExactIntegral[T]] - case class NumericPlus[T: Elem](n: ExactNumeric[T]) extends EndoBinOp[T]("+", n.plus) + /** Descriptor of binary `+` operation. */ + case class NumericPlus[T: Elem](n: ExactNumeric[T]) extends EndoBinOp[T]("+") { + override def applySeq(x: T, y: T): T = n.plus(x, y) + } - case class NumericMinus[T: Elem](n: ExactNumeric[T]) extends EndoBinOp[T]("-", n.minus) + /** Descriptor of binary `-` operation. */ + case class NumericMinus[T: Elem](n: ExactNumeric[T]) extends EndoBinOp[T]("-") { + override def applySeq(x: T, y: T): T = n.minus(x, y) + } - case class NumericTimes[T: Elem](n: ExactNumeric[T]) extends EndoBinOp[T]("*", n.times) + /** Descriptor of binary `*` operation. */ + case class NumericTimes[T: Elem](n: ExactNumeric[T]) extends EndoBinOp[T]("*") { + override def applySeq(x: T, y: T): T = n.times(x, y) + } - class DivOp[T: Elem](opName: String, applySeq: (T, T) => T, n: ExactIntegral[T]) extends EndoBinOp[T](opName, applySeq) { + /** Base class for descriptors of binary division operations. */ + abstract class DivOp[T: Elem](opName: String, n: ExactIntegral[T]) extends EndoBinOp[T](opName) { override def shouldPropagate(lhs: T, rhs: T) = rhs != n.zero } - case class NumericNegate[T: Elem](n: ExactNumeric[T]) extends UnOp[T, T]("-", n.negate) + /** Descriptor of unary `-` operation. 
*/ + case class NumericNegate[T: Elem](n: ExactNumeric[T]) extends UnOp[T, T]("-") { + override def applySeq(x: T): T = n.negate(x) + } - case class NumericToDouble[T](n: ExactNumeric[T]) extends UnOp[T,Double]("ToDouble", n.toDouble) + /** Descriptor of unary `ToDouble` conversion operation. */ + case class NumericToDouble[T](n: ExactNumeric[T]) extends UnOp[T,Double]("ToDouble") { + override def applySeq(x: T): Double = n.toDouble(x) + } - case class NumericToFloat[T](n: ExactNumeric[T]) extends UnOp[T, Float]("ToFloat", n.toFloat) + /** Descriptor of unary `ToFloat` conversion operation. */ + case class NumericToFloat[T](n: ExactNumeric[T]) extends UnOp[T, Float]("ToFloat") { + override def applySeq(x: T): Float = n.toFloat(x) + } - case class NumericToInt[T](n: ExactNumeric[T]) extends UnOp[T,Int]("ToInt", n.toInt) + /** Descriptor of unary `ToInt` conversion operation. */ + case class NumericToInt[T](n: ExactNumeric[T]) extends UnOp[T,Int]("ToInt") { + override def applySeq(x: T): Int = n.toInt(x) + } - case class NumericToLong[T](n: ExactNumeric[T]) extends UnOp[T,Long]("ToLong", n.toLong) + /** Descriptor of unary `ToLong` conversion operation. */ + case class NumericToLong[T](n: ExactNumeric[T]) extends UnOp[T,Long]("ToLong") { + override def applySeq(x: T): Long = n.toLong(x) + } - case class Abs[T: Elem](n: ExactNumeric[T]) extends UnOp[T, T]("Abs", n.abs) + /** Descriptor of unary `abs` operation. */ + case class Abs[T: Elem](n: ExactNumeric[T]) extends UnOp[T, T]("Abs") { + override def applySeq(x: T): T = n.abs(x) + } - case class IntegralDivide[T](i: ExactIntegral[T])(implicit elem: Elem[T]) extends DivOp[T]("/", i.quot, i) + /** Descriptor of binary `/` operation (integral division). 
*/ + case class IntegralDivide[T](i: ExactIntegral[T])(implicit elem: Elem[T]) extends DivOp[T]("/", i) { + override def applySeq(x: T, y: T): T = i.quot(x, y) + } - case class IntegralMod[T](i: ExactIntegral[T])(implicit elem: Elem[T]) extends DivOp[T]("%", i.rem, i) + /** Descriptor of binary `%` operation (remainder of integral division). */ +case class IntegralMod[T](i: ExactIntegral[T])(implicit elem: Elem[T]) extends DivOp[T]("%", i) { + /** Note, this is implemented using `ExactIntegral.rem` method which delegates to + * `scala.math.Integral.rem`. The latter also implements `%` operator in Scala for + * numeric types. + * @see sigmastate.eval.NumericOps.BigIntIsIntegral + */ + override def applySeq(x: T, y: T): T = i.rem(x, y) + } + /** Compares the given value with zero of the given ExactNumeric instance. */ @inline final def isZero[T](x: T, n: ExactNumeric[T]) = x == n.zero + + /** Compares the given value with 1 of the given ExactNumeric instance. */ @inline final def isOne[T](x: T, n: ExactNumeric[T]) = x == n.fromInt(1) } diff --git a/core/src/main/scala/scalan/primitives/OrderingOps.scala b/core/src/main/scala/scalan/primitives/OrderingOps.scala index 19045be96a..7d997e1487 100644 --- a/core/src/main/scala/scalan/primitives/OrderingOps.scala +++ b/core/src/main/scala/scalan/primitives/OrderingOps.scala @@ -2,10 +2,12 @@ package scalan.primitives import scalan.{Base, Scalan, ExactOrdering} +/** Slice in Scala cake with definitions of comparison operations. */ trait OrderingOps extends Base { self: Scalan => implicit def repOrderingToOrderingOps[T](x: Ref[T])(implicit n: ExactOrdering[T]) = new OrderingOpsCls(x) implicit def OrderingToOrderingOps[T](x: T)(implicit n: ExactOrdering[T], et: Elem[T]) = new OrderingOpsCls(toRep(x)) + /** Extension methods over `Ref[T]` given an instance of ExactOrdering for T. 
*/ class OrderingOpsCls[T](lhs: Ref[T])(implicit val n: ExactOrdering[T]) { def <(rhs: Ref[T]) = OrderingLT(n).apply(lhs,rhs) def <=(rhs: Ref[T]) = OrderingLTEQ(n).apply(lhs,rhs) @@ -16,17 +18,38 @@ trait OrderingOps extends Base { self: Scalan => def compare(rhs: Ref[T]): Ref[Int] = OrderingCompare(n).apply(lhs,rhs) } - case class OrderingLT[T](ord: ExactOrdering[T]) extends BinOp[T, Boolean]("<", ord.lt) + /** Descriptor of binary `<` operation. */ + case class OrderingLT[T](ord: ExactOrdering[T]) extends BinOp[T, Boolean]("<") { + override def applySeq(x: T, y: T): Boolean = ord.lt(x, y) + } - case class OrderingLTEQ[T](ord: ExactOrdering[T]) extends BinOp[T, Boolean]("<=", ord.lteq) + /** Descriptor of binary `<=` operation. */ + case class OrderingLTEQ[T](ord: ExactOrdering[T]) extends BinOp[T, Boolean]("<=") { + override def applySeq(x: T, y: T): Boolean = ord.lteq(x, y) + } - case class OrderingGT[T](ord: ExactOrdering[T]) extends BinOp[T, Boolean](">", ord.gt) + /** Descriptor of binary `>` operation. */ + case class OrderingGT[T](ord: ExactOrdering[T]) extends BinOp[T, Boolean](">") { + override def applySeq(x: T, y: T): Boolean = ord.gt(x, y) + } - case class OrderingGTEQ[T](ord: ExactOrdering[T]) extends BinOp[T, Boolean](">=", ord.gteq) + /** Descriptor of binary `>=` operation. */ + case class OrderingGTEQ[T](ord: ExactOrdering[T]) extends BinOp[T, Boolean](">=") { + override def applySeq(x: T, y: T): Boolean = ord.gteq(x, y) + } - case class OrderingMax[T: Elem](ord: ExactOrdering[T]) extends BinOp[T, T]("max", ord.max) + /** Descriptor of binary `max` operation. */ + case class OrderingMax[T: Elem](ord: ExactOrdering[T]) extends BinOp[T, T]("max") { + override def applySeq(x: T, y: T): T = ord.max(x, y) + } - case class OrderingMin[T: Elem](ord: ExactOrdering[T]) extends BinOp[T, T]("min", ord.min) + /** Descriptor of binary `min` operation. 
*/ + case class OrderingMin[T: Elem](ord: ExactOrdering[T]) extends BinOp[T, T]("min") { + override def applySeq(x: T, y: T): T = ord.min(x, y) + } - case class OrderingCompare[T](ord: ExactOrdering[T]) extends BinOp[T, Int]("compare", ord.compare) + /** Descriptor of binary `compare` operation. */ + case class OrderingCompare[T](ord: ExactOrdering[T]) extends BinOp[T, Int]("compare") { + override def applySeq(x: T, y: T): Int = ord.compare(x, y) + } } \ No newline at end of file diff --git a/core/src/main/scala/scalan/primitives/Thunks.scala b/core/src/main/scala/scalan/primitives/Thunks.scala index 739285f648..323fd26042 100644 --- a/core/src/main/scala/scalan/primitives/Thunks.scala +++ b/core/src/main/scala/scalan/primitives/Thunks.scala @@ -10,50 +10,85 @@ import scalan.util.{Covariant, GraphUtil} import scala.collection.Seq +/** Slice in the [[Scalan]] cake with definitions of Thunk operations. + * See https://en.wikipedia.org/wiki/Thunk. + * Thunks are used to represent lazy operations in the graph IR. + * @see ApplyBinOpLazy, IfThenElseLazy + */ trait Thunks extends Functions with GraphVizExport { self: Scalan => type Th[+T] = Ref[Thunk[T]] + + /** Phantom type to define thunk-typed graph nodes and thunk based lazy operations. + * Usually used inside [[Ref]], see for example [[Th]]. + * See also for details http://gigiigig.github.io/tlp-step-by-step/phantom-types.html + */ trait Thunk[+A] { def value: A } + + /** A class of factory to create new Thunks by use `Thunk { ... }` expressions. */ class ThunkCompanion { def apply[T](block: => Ref[T]) = thunk_create(block) def forced[T](block: => Ref[T]) = thunk_create(block).force } + + /** Allow expressions like `Thunk { ... }` to create new Thunks. */ val Thunk: ThunkCompanion = new ThunkCompanion + /** Extension methods on `Ref[Thunk[T]]` values. */ implicit class RepThunkOps[T](t: Th[T]) { + /** Forces evaluation of the thunk to produce the delayed value. 
*/ def force() = thunk_force(t) + + /** Creates a new thunk which, when forced, in turn forces `t` and then maps the resulting + * value using `f`. The application of `f` may be inlined to the new thunk body, or + * may be reified as [[Apply]] node, this depends on parameters of Lambda node. + * + * @param f reference to graph node of type [[Lambda]] + */ def map[R](f: Ref[T => R]): Th[R] = thunk_map(t, f) + + /** Creates a new thunk which, when forced, in turn forces `t` and then maps the resulting + * value using `f`. + * @param f scala function which is always inlined (staged) into the new thunk body + */ def map[R](f: Ref[T] => Ref[R]): Th[R] = thunk_map1(t, f) } + /** Thunk is an instance of container type class [[Cont]]. */ implicit val thunkCont: Cont[Thunk] = new Cont[Thunk] { - def lift[T](implicit eT: Elem[T]) = element[Thunk[T]] - def unlift[T](implicit eFT: Elem[Thunk[T]]) = eFT.eItem - def unapply[T](e: Elem[_]) = e match { + override def lift[T](implicit eT: Elem[T]) = element[Thunk[T]] + override def unlift[T](implicit eFT: Elem[Thunk[T]]) = eFT.eItem + override def unapply[T](e: Elem[_]) = e match { case e: ThunkElem[_] => Some(asElem[Thunk[T]](e)) case _ => None } } import Liftables._ - import scala.reflect.{ClassTag, classTag} + + /** Runtime representation of lazy values. + * Each [[Thunk]] typed graph node evaluates to a value of this type. + */ type SThunk[T] = () => T + /** Graph node to represent constants of type [[Thunk]]. + * @see scalan.Base.Liftables.Liftable */ case class ThunkConst[ST, T](constValue: SThunk[ST], lT: Liftable[ST, T]) extends BaseDef[Thunk[T]]()(thunkElement(lT.eW)) with LiftedConst[SThunk[ST], Thunk[T]] { val liftable: Liftable[SThunk[ST], Thunk[T]] = liftableThunk(lT) } + /** Implementation of Liftable type class for `Thunk[T]` given liftable for `T`. 
*/ case class LiftableThunk[ST, T](lT: Liftable[ST, T]) extends Liftable[SThunk[ST], Thunk[T]] { import RType._ - def eW: Elem[Thunk[T]] = thunkElement(lT.eW) - def sourceType: RType[SThunk[ST]] = { + override def eW: Elem[Thunk[T]] = thunkElement(lT.eW) + override def sourceType: RType[SThunk[ST]] = { implicit val tST = lT.sourceType RType[SThunk[ST]] } - def lift(x: SThunk[ST]): Ref[Thunk[T]] = ThunkConst(x, lT) - def unlift(w: Ref[Thunk[T]]): SThunk[ST] = w.node match { + override def lift(x: SThunk[ST]): Ref[Thunk[T]] = ThunkConst(x, lT) + override def unlift(w: Ref[Thunk[T]]): SThunk[ST] = w.node match { case ThunkConst(x: SThunk[_], l) if l == lT => x.asInstanceOf[SThunk[ST]] case _ => unliftError(w) } @@ -63,6 +98,7 @@ trait Thunks extends Functions with GraphVizExport { self: Scalan => LiftableThunk(lT) + /** Implements a type descriptor of `Thunk[A]` type given the instance of `A`. */ case class ThunkElem[A](override val eItem: Elem[A]) extends EntityElem1[A, Thunk[A], Thunk](eItem, container[Thunk]) { override lazy val liftable = asLiftable[SThunk[_], Thunk[A]](liftableThunk(eItem.liftable)) @@ -71,14 +107,24 @@ trait Thunks extends Functions with GraphVizExport { self: Scalan => implicit def thunkElement[T](implicit eItem: Elem[T]): Elem[Thunk[T]] = cachedElemByClass(eItem)(classOf[ThunkElem[T]]) + + /** Implicit conversion (downcast) to access `ThunkElem.eItem` field. */ implicit def extendThunkElement[T](elem: Elem[Thunk[T]]): ThunkElem[T] = elem.asInstanceOf[ThunkElem[T]] - class ThunkDef[A](val root: Ref[A], _scheduleIds: =>ScheduleIds) + /** Graph node representing thunk with reified body. + * Each thunk node is a specialized implementation of AstGraph abstract class. + * @param _scheduleIds compact representation of thunk body, i.e. a sequence of graph + * nodes, which will be executed when the thunk is forced. Each + * node is given by its id. The node can be resolved using + * `getSym(id).node` expression. 
+ * @param root graph node, which represents the result value of thunk forcing. + */ + class ThunkDef[A](val root: Ref[A], _scheduleIds: => ScheduleIds) extends AstGraph with Def[Thunk[A]] { implicit def eA: Elem[A] = root.elem private var _selfType: Elem[Thunk[A]] = _ - def resultType: Elem[Thunk[A]] = + override def resultType: Elem[Thunk[A]] = if (_selfType != null) _selfType else { val res = thunkElement(eA) @@ -94,7 +140,7 @@ trait Thunks extends Functions with GraphVizExport { self: Scalan => * different nodeIds and hence they are NOT equal. * */ override lazy val hashCode: Int = _nodeId //41 * (41 + root.hashCode) + schedule.hashCode - def canEqual(other: Any) = other.isInstanceOf[ThunkDef[_]] + override def canEqual(other: Any) = other.isInstanceOf[ThunkDef[_]] override def equals(other: Any) = other match { case that: ThunkDef[_] => _nodeId == that._nodeId @@ -104,18 +150,20 @@ trait Thunks extends Functions with GraphVizExport { self: Scalan => // Product implementation - def productElement(n: Int): Any = n match { + override def productElement(n: Int): Any = n match { case 0 => root case _ => throw new NoSuchElementException(s"ThunkDef.productElement($n) is undefined") } - def productArity: Int = 1 + override def productArity: Int = 1 + + override def boundVars = EmptySeqOfSym - override def boundVars = Nil - override lazy val freeVars: Seq[Sym] = if (schedule.isEmpty) Array(root) else super.freeVars + override val roots: Seq[Sym] = Array(root) + + override lazy val freeVars: Seq[Sym] = if (schedule.isEmpty) roots else super.freeVars override protected def getDeps: Array[Sym] = freeVars.toArray - val roots: Seq[Sym] = Array(root) override lazy val rootIds: DBuffer[Int] = super.rootIds override def isIdentity: Boolean = false } @@ -126,18 +174,27 @@ trait Thunks extends Functions with GraphVizExport { self: Scalan => } } + /** Helper object to handle construction of nested thunks. One instance is created for + * each ThunkDef under construction. 
This corresponds to syntactic nesting of thunks. + * @param parent the scope of the parent thunk + * @param thunkSym reference to the Graph node for which this scope is created. + */ class ThunkScope(val parent: ThunkScope, val thunkSym: Ref[Any]) { private val bodyIds: DSet[Int] = DSet.ofSize(16) private val bodyDefs: AVHashMap[Def[_], Def[_]] = AVHashMap(32) @inline final def isEmptyBody: Boolean = bodyIds.isEmpty + /** Add the given graph node (by symbol) to this scope. */ def +=(sym: Sym): Unit = { val d = sym.node bodyIds += d.nodeId bodyDefs.put(d, d) } + /** Sort graph nodes of this scope topologically using depth-first search along + * node dependencies (graph edges). This will give evaluation order of the thunk. + */ def scheduleForResult(root: Ref[Any]): DBuffer[Int] = { val sch = GraphUtil.depthFirstOrderFrom( DBuffer(root.node.nodeId), @@ -156,6 +213,11 @@ trait Thunks extends Functions with GraphVizExport { self: Scalan => sch } + /** Find the given node among definitions accessible from this scope. + * It searches in the chain of nested scopes for the first match. + * If not found, searches in the global scope. + * @return null if not found at all. + */ def findDef[T](d: Def[T]): Ref[T] = { val existingOpt = bodyDefs.get(d) if (existingOpt.isDefined) return existingOpt.get.self.asInstanceOf[Ref[T]] @@ -166,27 +228,49 @@ trait Thunks extends Functions with GraphVizExport { self: Scalan => } } + /** The stack of nested thunks during graph construction. */ class ThunkStack { + /** Stack is represented as simple list with the first element as the top of the stack. */ var stack = List[ThunkScope]() + + /** @return optional top scope. */ @inline def top: Nullable[ThunkScope] = if (stack.isEmpty) Nullable.None else Nullable(stack.head) + + /** Push new scope when starting to construct a new thunk. */ def push(e: ThunkScope): this.type = { stack = e :: stack; this } + + /** Pop new scope when thunk has been constructed. 
*/ @inline def pop: ThunkScope = { val res = stack.head stack = stack.tail res } + + /** For a given thunk node, create a new scope and push it on the stack. */ def beginScope(thunkSym: Ref[Any]): ThunkScope = { val parent = if (stack.isEmpty) null else stack.head val scope = new ThunkScope(parent, thunkSym) this.push(scope) scope } + + /** End current scope and pop it from the stack. */ @inline def endScope(): Unit = { this.pop } } + protected val thunkStack = new ThunkStack implicit def repToThunk[A](block: Ref[A]): Ref[Thunk[A]] = thunk_create(block) + /** Constructs a new thunk node by executing the given `block` and collecting all the + * graph node created along the way. + * This methods: + * 1) starts a new nested ThunkScope, + * 2) executes the `block` to obtain resulting graph node + * 3) schedule thunk body for execution order + * 4) adds a new ThunkDef node and returns its reference. + * @return a reference to the newly created [[ThunkDef]] node + */ def thunk_create[A](block: => Ref[A]): Ref[Thunk[A]] = { var scheduleIds: ScheduleIds = null val resPH = placeholder(Lazy(AnyElement)).asInstanceOf[Ref[A]] // will be known after block is evaluated @@ -219,18 +303,38 @@ trait Thunks extends Functions with GraphVizExport { self: Scalan => } } + /** Specifies thunk staging strategy with respect to handling thunk_force operation. + * @see thunk_force + */ var isInlineThunksOnForce = false + /** Inlines the given thunk by cloning all its nodes and applying the given substitution (transformer). + * @param thunk reference to the thunk node + * @param subst transformer to be applied for each mirrored (cloned) node. 
+ * @return the reference to the graph node, which represents the resulting value of the thunk + */ def forceThunkByMirror[A](thunk: Th[A], subst: MapTransformer = MapTransformer.empty()): Ref[A] = { val th = thunk.node.asInstanceOf[ThunkDef[A]] forceThunkDefByMirror(th, subst) } + + /** Inlines the given thunk by cloning all its nodes and applying the given substitution (transformer). + * @param th the thunk node + * @param subst transformer to be applied for each mirrored (cloned) node. + * @return the reference to the graph node, which represents the resulting value of the thunk + */ def forceThunkDefByMirror[A](th: ThunkDef[A], subst: MapTransformer = MapTransformer.empty()): Ref[A] = { val body = th.scheduleIds val t = DefaultMirror.mirrorSymbols(subst, NoRewriting, th, body) t(th.root) } + /** Logical force of the thunk. Depending on isInlineThunksOnForce it either inlines the + * thunk body or creates a new ThunkForce node. + * + * @return a reference to the graph node, which represent the result of the thunk's + * evaluation. + */ def thunk_force[A](t: Th[A]): Ref[A] = if (isInlineThunksOnForce) t.node match { @@ -241,6 +345,7 @@ trait Thunks extends Functions with GraphVizExport { self: Scalan => else ThunkForce(t) + /** Graph node to represent thunk forcing operation. */ case class ThunkForce[A](thunk: Ref[Thunk[A]]) extends Def[A] { implicit def resultType = thunk.elem.eItem override def transform(t: Transformer) = ThunkForce(t(thunk)) diff --git a/core/src/main/scala/scalan/primitives/UnBinOps.scala b/core/src/main/scala/scalan/primitives/UnBinOps.scala index ed690e7aa6..749556bf0e 100644 --- a/core/src/main/scala/scalan/primitives/UnBinOps.scala +++ b/core/src/main/scala/scalan/primitives/UnBinOps.scala @@ -4,44 +4,75 @@ import scalan.{Scalan, Base} trait UnBinOps extends Base { self: Scalan => - class UnOp[A, R](val opName: String, val applySeq: A => R)(implicit val eResult: Elem[R]) { + /** Base class for descriptors of unary operations. 
*/ + abstract class UnOp[A, R](val opName: String)(implicit val eResult: Elem[R]) { override def toString = opName + /** Called as part of graph interpretation to execute the given unary operation. + * @param x operation argument + * @return result of applying this operation to x + */ + def applySeq(x: A): R + /** Builds a new graph node by applying this operation to the given argument. */ def apply(arg: Ref[A]) = applyUnOp(this, arg) + /** Whether the constants should be propagated through this operation by rewriting. */ def shouldPropagate(arg: A) = true } - class BinOp[A, R](val opName: String, val applySeq: (A, A) => R)(implicit val eResult: Elem[R]) { + /** Base class for descriptors of binary operations. */ + abstract class BinOp[A, R](val opName: String)(implicit val eResult: Elem[R]) { override def toString = opName + /** Called as part of graph interpretation to execute the given binary operation. + * @param x operation argument + * @param y operation argument + * @return result of applying this operation to (x, y) + */ + def applySeq(x: A, y: A): R + + /** Builds a new graph node by applying this operation to the given arguments. */ def apply(lhs: Ref[A], rhs: Ref[A]) = applyBinOp(this, lhs, rhs) + + /** Builds a new graph node by applying this operation to the given arguments. + * This is a short-circuiting (aka lazy) version of the operation, where the laziness is + * represented by Thunk. + */ def applyLazy(lhs: Ref[A], rhs: Ref[Thunk[A]]) = applyBinOpLazy(this, lhs, rhs) - // ideally shouldn't be necessary, but - // we curently can't handle division by zero properly + /** Whether the constants should be propagated through this operation by rewriting. */ def shouldPropagate(lhs: A, rhs: A) = true } type EndoUnOp[A] = UnOp[A, A] type EndoBinOp[A] = BinOp[A, A] + /** Graph node which represents application of the given unary operation to the given argument. 
*/ case class ApplyUnOp[A, R](op: UnOp[A, R], arg: Ref[A]) extends BaseDef[R]()(op.eResult) { override def toString = s"$op($arg)" override def transform(t: Transformer): Def[R] = ApplyUnOp[A,R](op, t(arg)) } + /** Graph node which represents application of the given binary operation to the given arguments. */ case class ApplyBinOp[A, R](op: BinOp[A, R], lhs: Ref[A], rhs: Ref[A]) extends BaseDef[R]()(op.eResult) { override def toString = s"$op($lhs, $rhs)" override def transform(t: Transformer): Def[R] = ApplyBinOp[A,R](op, t(lhs), t(rhs)) } + + /** Graph node which represents application of the given binary operation to the given arguments + * where the second argument is lazy. + */ case class ApplyBinOpLazy[A, R](op: BinOp[A, R], lhs: Ref[A], rhs: Ref[Thunk[A]]) extends BaseDef[R]()(op.eResult) { override def toString = s"$lhs $op { $rhs }" override def transform(t: Transformer): Def[R] = ApplyBinOpLazy[A,R](op, t(lhs), t(rhs)) } + /** Overridable constructor of an unary operation node. */ def applyUnOp[A, R](op: UnOp[A, R], arg: Ref[A]): Ref[R] = ApplyUnOp(op, arg) + /** Overridable constructor of a binary operation node. */ def applyBinOp[A, R](op: BinOp[A, R], lhs: Ref[A], rhs: Ref[A]): Ref[R] = ApplyBinOp(op, lhs, rhs) + + /** Overridable constructor of a binary operation node with lazy argument. 
*/ def applyBinOpLazy[A, R](op: BinOp[A, R], lhs: Ref[A], rhs: Ref[Thunk[A]]): Ref[R] = ApplyBinOpLazy(op, lhs, rhs) } \ No newline at end of file diff --git a/core/src/main/scala/scalan/primitives/UniversalOps.scala b/core/src/main/scala/scalan/primitives/UniversalOps.scala index 30aeb28dae..eaca5821ce 100644 --- a/core/src/main/scala/scalan/primitives/UniversalOps.scala +++ b/core/src/main/scala/scalan/primitives/UniversalOps.scala @@ -3,9 +3,13 @@ package scalan.primitives import scalan.{Base, Scalan} trait UniversalOps extends Base { scalan: Scalan => - case class HashCode[A]() extends UnOp[A, Int]("hashCode", _.hashCode) + case class HashCode[A]() extends UnOp[A, Int]("hashCode") { + override def applySeq(x: A): Int = x.hashCode + } - case class ToString[A]() extends UnOp[A, String]("toString", _.toString) + case class ToString[A]() extends UnOp[A, String]("toString") { + override def applySeq(x: A): String = x.toString + } /** Represents calculation of size in bytes of the given value. * The descriptor value.elem can be used to decompose value into components. 
diff --git a/core/src/main/scala/scalan/staged/AstGraphs.scala b/core/src/main/scala/scalan/staged/AstGraphs.scala index a9967bf100..dd801e25e3 100644 --- a/core/src/main/scala/scalan/staged/AstGraphs.scala +++ b/core/src/main/scala/scalan/staged/AstGraphs.scala @@ -1,7 +1,7 @@ package scalan.staged import scala.collection._ -import scalan.Scalan +import scalan.{Base, Scalan, emptyDBufferOfInt} import scalan.compilation.GraphVizConfig import spire.syntax.all.cfor import debox.{Set => DSet, Buffer => DBuffer, Map => DMap} @@ -70,11 +70,16 @@ trait AstGraphs extends Transforming { self: Scalan => } } } - val res = new Array[Sym](resIds.length) - cfor(0)(_ < resIds.length, _ + 1) { i => - res(i) = getSym(resIds(i)) + val resLength = resIds.length + if (resLength == 0) + EmptySeqOfSym + else { + val res = new Array[Sym](resLength) + cfor(0)(_ < resLength, _ + 1) { i => + res(i) = getSym(resIds(i)) + } + res } - res } /** Schedule represents a body of compound definition - topologically ordered @@ -86,21 +91,31 @@ trait AstGraphs extends Transforming { self: Scalan => /** Sequence of node references forming a schedule. * @hotspot don't beautify the code */ lazy val schedule: Schedule = { - val len = scheduleIds.length - val res = new Array[Sym](len) - cfor(0)(_ < len, _ + 1) { i => - res(i) = getSym(scheduleIds(i)) + val ids = scheduleIds + val len = ids.length + if (len == 0) EmptySeqOfSym + else { + val res = new Array[Sym](len) + cfor(0)(_ < len, _ + 1) { i => + res(i) = getSym(ids(i)) + } + res } - res } /** Set of symbol ids in the schedule. Can be used to quickly recognize * symbols belonging to the body of this definition. */ lazy val domain: DSet[Int] = { - val res = DSet.ofSize[Int](scheduleIds.length) - res ++= scheduleIds.toArray - res + val ids = scheduleIds + val len = ids.length + if (len == 0) { + EmptyDSetOfInt + } else { + val res = DSet.ofSize[Int](len) + res ++= ids.toArray + res + } } /** Whether this graph represents identity function. 
*/ @@ -132,7 +147,10 @@ trait AstGraphs extends Transforming { self: Scalan => lazy val flatSchedule: Schedule = { val flatBuf = DBuffer.ofSize[Sym](schedule.length) buildFlatSchedule(schedule, flatBuf) - flatBuf.toArray + if (flatBuf.length > 0) + flatBuf.toArray + else + EmptySeqOfSym } /** Build usage information induced by the given schedule. @@ -180,21 +198,15 @@ trait AstGraphs extends Transforming { self: Scalan => def globalUsagesOf(s: Sym): DBuffer[Sym] = allNodes.get(s.node.nodeId) match { case Some(node) => node.outSyms - case None => DBuffer.empty[Sym] + case None => emptyDBufferOfSym } def hasManyUsagesGlobal(s: Sym): Boolean = globalUsagesOf(s).length > 1 - /** This empty buffer is returned every time the usages are requested for the node - * that is not in usageMap. - * WARNING! Since it is mutable, special care should be taken to not change this buffer. - * @hotspot used havily in scheduling */ - private val NoUsages = DBuffer.unsafe(new Array[Int](0)) - /** @hotspot for performance we return mutable structure, but it should never be changed. */ def usagesOf(id: Int): DBuffer[Int] = { val node = usageMap.getOrElse(id, null) - if (node == null) return NoUsages + if (node == null) return emptyDBufferOfInt node.usages } diff --git a/docs/aot-jit-switch.md b/docs/aot-jit-switch.md new file mode 100644 index 0000000000..7bed588428 --- /dev/null +++ b/docs/aot-jit-switch.md @@ -0,0 +1,195 @@ +## The Protocol and Requirements to replace AOT with JIT via soft-fork + +The goal of this document is to specify requirements for v4.0, v5.0 and upcoming releases. +It also specifies rules of transaction validation in the Ergo network with respect of +soft-fork activation which should be followed by different versions of nodes. +The v4.x -> v5.x soft-fork is motivated by the goal of switching from AOT to JIT-based +costing algorithm and the simplified ErgoTree interpreter. 
+ +### Definitions +In the text below we use the terms defined in the following table, please refer to it when +necessary. + +Term | Description +-----------------|------------ + _ScriptV1_ | The current version of ErgoTree (3.x releases) used in ErgoBlock v1. Bits 0-2 == 0 in the ErgoTree header byte. (see ErgoTree class). + _ScriptV2_ | The next version of ErgoTree (5.x releases) used after SF is activated. Bits 0-2 == 1 in the ErgoTree header byte. (see ErgoTree class). + R4.0-AOT-cost | cost estimation using v4.0 Ahead-Of-Time costing implementation + R4.0-AOT-verify | spending condition verification using v4.0 Ahead-Of-Time interpreter implementation + R5.0-JIT-verify | spending condition verification using v5.0 simplified interpreter with Just-In-Time costing of fullReduction and AOT sigma protocol costing. + skip-pool-tx | skip pool transaction when building a new block candidate + skip-accept | skip script evaluation (both costing and verification) and treat it as True proposition (accept spending) + skip-reject | skip script evaluation (both costing and verification) and treat it as False proposition (reject spending) + Validation Context | a tuple of (`Block Type`, `SF Status`, `Script Version`) + Validation Action | an action taken by a node in the given validation context + SF Status | soft-fork status of the block. The status is `active` when enough votes have been collected. + +### Script Validation Rules Summary + +Validation of scripts in blocks is defined for each release and depends on the _validation +context_ which includes type of block, soft-fork status and script version. We denote +blocks being created by miners as `candidate` and those distributed across the network as +`mined`. + +Thus, we have 8 different validation contexts multiplied by 2 node versions +having in total 16 validation rules as summarized in the following table, which +specifies the _validation action_ a node has to take in the given contexts. 
+ +Rule#| SF Status| Block Type| Script Version | Release | Validation Action +-----|----------|-----------|----------------|---------|-------- +1 | inactive | candidate | Script v1 | v4.0 | R4.0-AOT-cost, R4.0-AOT-verify +2 | inactive | candidate | Script v1 | v5.0 | R4.0-AOT-cost, R4.0-AOT-verify +3 | inactive | candidate | Script v2 | v4.0 | skip-pool-tx (cannot handle) +4 | inactive | candidate | Script v2 | v5.0 | skip-pool-tx (wait activation) +|||| +5 | inactive | mined | Script v1 | v4.0 | R4.0-AOT-cost, R4.0-AOT-verify +6 | inactive | mined | Script v1 | v5.0 | R4.0-AOT-cost, R4.0-AOT-verify +7 | inactive | mined | Script v2 | v4.0 | skip-reject (cannot handle) +8 | inactive | mined | Script v2 | v5.0 | skip-reject (wait activation) +|||| +9 | active | candidate | Script v1 | v4.0 | R4.0-AOT-cost, R4.0-AOT-verify +10 | active | candidate | Script v1 | v5.0 | R5.0-JIT-verify +11 | active | candidate | Script v2 | v4.0 | skip-pool-tx (cannot handle) +12 | active | candidate | Script v2 | v5.0 | R5.0-JIT-verify +|||| +13 | active | mined | Script v1 | v4.0 | skip-accept (rely on majority) +14 | active | mined | Script v1 | v5.0 | R5.0-JIT-verify +15 | active | mined | Script v2 | v4.0 | skip-accept (rely on majority) +16 | active | mined | Script v2 | v5.0 | R5.0-JIT-verify + +Note the following properties of the validation rules. + +0. Please note that block creation is not a part of the Ergo consensus protocol, and +miners can do whatever they want, in particular completely custom block assembly. For +this reason, the rules for `candidate` blocks are _reference implementation_ only, and +nodes are not required to follow them exactly. + +1. Rules 1-4 specify creation of new candidate blocks _before_ soft-fork is activated. +They require that the behaviour of v4.0 and v5.0 nodes should be identical. + +2. Rules 9-10 specify creation of new candidate blocks _after_ soft-fork is activated. 
+They are different for v4.0 and v5.0 nodes, but +[equivalent](#equivalence-properties-of-validation-actions) with respect to preserving +consensus (see also [Rule Descriptions](#rule-descriptions) for details). + +3. For any given tuple (`SF Status`, `Script Version`, `Release`) the _equivalent_ `ValidationAction` is +applied for both `candidate` and `mined` blocks. This proves the consistency of the rules +with respect to the change of the block status from `candidate` to `mined`, both before +and after soft-fork activation. + +4. Each rule `i`, where `i` is an odd number, defines a `Validation Action` performed by +a v4.0 node. Each such rule is paired with the `i+1` rule which defines `Validation Action` +performed by a v5.0 node. Any such pair `(i, i+1)` of rules has `Validation Actions` +which are either the same or equivalent with respect to the [Equivalence Properties of +Validation Actions](#equivalence-properties-of-validation-actions). This proves +consistency of validation actions across v4.0 and v5.0 nodes. + +5. After SF is activated (`SF Status == active`), both AOT-cost and AOT-verify +implementations are no longer used in `Validation Action` of v5.0 nodes. The only context +where a v5.0 node needs to use AOT-based verification is given by Rule 6, which is to verify +a v1 script in a historical mined block before SF is activated. +However, relying on [Prop 3](#equivalence-properties-of-validation-actions) we can replace +Rule 6 in a new v5.0.1 release with the following _equivalent_ rule + +Rule#| SF Status | Block Type| Script Version | Release | Validation Action +-----|-----------|-----------|----------------|---------|-------- +17 | inactive | mined | Script v1 | v5.0.1 | R5.0-JIT-verify + +This will make it possible to remove the AOT implementation in v5.0.1 and simplify the reference +implementation significantly. 
+ +### Rule Descriptions + +#### Rules 1 and 2 + _Handle v1 script in candidate block when SF is not active._ + As ensured by _Prop1_ and _Prop2_, both v4.0 and v5.0 nodes use equivalent AOT-cost and + AOT-verify and thus have consensus. + Release v5.0 will contain both AOT and JIT versions simultaneously and thus can _behave as v4.0_ + before SF is activated and _behave as v5.0_ after SF activation. + +#### Rules 3 and 4 +_Both v4.0 and v5.0 nodes reject v2 scripts in new candidate blocks when SF is NOT active._ +This is ensured by _Prop5_ which is motivated by the following reasons: +- v4.0 nodes have no implementation of v2 scripts, thus rejecting them altogether both in + input and output boxes of new candidate blocks +- v5.0 nodes behave like v4.0 nodes and are waiting for the majority of nodes to vote, thus +rejecting until SF is activated + +#### Rules 5 and 6 + _Handle v1 script in mined block when SF is not active._ + Similar to rules 1 and 2 but for `mined` blocks. + +#### Rules 7 and 8 +_Both v4.0 and v5.0 nodes reject v2 scripts in `mined` blocks when SF is NOT active._ +Similar to rules 3 and 4. + +#### Rules 9 and 10 +These rules allow v1 scripts to enter the blockchain even after SF is activated (for backward +compatibility with applications). +Now, after SF is activated, the majority consists of v5.0 nodes and they will do +`R5.0-JIT-verify(Script v1)` which is equivalent to `R4.0-AOT-verify(Script v1)` due to +_Prop3_. + +To understand this pair of rules it is important to remember that a specific version of +ErgoTree (in this case v1) assumes the fixed semantics of all operations. This, however, +doesn't restrict the interpreter implementations and we use this fact +to switch from `R4.0-AOT-verify` to `R5.0-JIT-verify` relying on their equivalence +property [Prop 3](#equivalence-properties-of-validation-actions). + +However, for backward compatibility with applications we DON'T NEED equivalence of costing, hence +exact cost estimation is not necessary. 
For this reason we have the relaxed condition in +_Prop4_, which means that any ScriptV1 admitted by `R4.0-AOT-cost` will also be admitted by +`R5.0-JIT-cost`. For this reason, a v4.0-based application interacting with a v5.0 node +will not notice the difference. + +#### Rules 11 and 12 +After SF is activated, a v4.0 node cannot verify transactions containing v2 scripts, and as +a result the v4.0 node cannot include such transactions in new `candidate` blocks. Thus it +performs the `skip-pool-tx` action essentially using only those mempool transactions which it +can handle. The majority of network nodes (v5.0) will do `R5.0-JIT-verify` validation of v2 +scripts in `candidate` blocks. + +#### Rules 13 and 14 +After SF is activated, a v4.0 node performs `skip-accept` for verification of `mined` blocks by relying on +the majority of v5.0 nodes, essentially not performing verification even of v1 scripts. The +majority of nodes uses the new JIT based implementation of the ErgoTree interpreter +`R5.0-JIT-verify` procedure for costing and verification, has consensus about blocks and +v4.0 nodes just accept all mined blocks created elsewhere. + +#### Rules 15 and 16 +After SF is activated, a v4.0 node performs `skip-accept` of `mined` blocks by relying on the majority +of v5.0 nodes since it cannot handle v2 scripts. In the same context, the majority of +nodes uses the new JIT based implementation of the ErgoTree interpreter `R5.0-JIT-verify` +procedure for costing and verification. + + +### Equivalence Properties of Validation Actions + +In order to guarantee network consensus in the presence of both v4.0 and v5.0 nodes, +the implementation of `R4.0-AOT-verify`, `R5.0-AOT-verify`, `R4.0-AOT-cost`, +`R5.0-AOT-cost`, `R5.0-JIT-verify` should satisfy the following properties. 
+ + _Prop 1._ AOT-verify is preserved: `forall s:ScriptV1, R4.0-AOT-verify(s) == R5.0-AOT-verify(s)` + + _Prop 2._ AOT-cost is preserved: `forall s:ScriptV1, R4.0-AOT-cost(s) == R5.0-AOT-cost(s)` + + _Prop 3._ JIT-verify can replace AOT-verify: `forall s:ScriptV1, R5.0-JIT-verify(s) == R4.0-AOT-verify(s)` + + _Prop 4._ JIT-cost is bound by AOT-cost: `forall s:ScriptV1, R5.0-JIT-cost(s) <= R4.0-AOT-cost(s)` + + _Prop 5._ ScriptV2 is rejected before SF is active: +`forall s:ScriptV2, if not SF is active => R4.0-verify(s) == R5.0-verify(s) == Reject` + + ### Other Notes + + - Since we are going to fix some bugs, the behaviour of v1 and v2 scripts is in general not +required to be precisely equivalent. This is because `R5.0-JIT-verify` supports both v1 +and v2 scripts so that `R5.0-JIT-verify(Script_v1) == R4.0-AOT-verify(Script_v1)` due to +[Prop 1](#equivalence-properties-of-validation-actions) and `R5.0-JIT-verify(Script_v2)` +may implement an interpreter for a completely different language. Of course, we don't want it to be _completely_ +different, in particular to ease migration. +So while old apps are supported unchanged, new apps are encouraged to use the new ErgoScript +frontend which will compile v2 scripts. 
+ +- Also, on v1 and v2, it would be better to avoid changing semantics of existing ops, +deprecation old and introducing new ones is cleaner \ No newline at end of file diff --git a/sigma-impl/src/main/scala/special/sigma/TestBigInt.scala b/sigma-impl/src/main/scala/special/sigma/TestBigInt.scala index b23cea1e81..423386051b 100644 --- a/sigma-impl/src/main/scala/special/sigma/TestBigInt.scala +++ b/sigma-impl/src/main/scala/special/sigma/TestBigInt.scala @@ -4,8 +4,9 @@ import special.collection.Coll import java.math.BigInteger import scalan.util.Extensions.BigIntegerOps +// TODO refactor: this class should be removed before v5.0 abstract class TestBigInt(private[sigma] val value: BigInteger) extends BigInt { - val dsl: TestSigmaDslBuilder = new TestSigmaDslBuilder + def dsl: TestSigmaDslBuilder override def toByte : Byte = value.byteValueExact() override def toShort: Short = value.shortValueExact() diff --git a/sigma-impl/src/main/scala/special/sigma/TestGroupElement.scala b/sigma-impl/src/main/scala/special/sigma/TestGroupElement.scala index bf57777773..d8b84fd2f2 100644 --- a/sigma-impl/src/main/scala/special/sigma/TestGroupElement.scala +++ b/sigma-impl/src/main/scala/special/sigma/TestGroupElement.scala @@ -3,8 +3,9 @@ package special.sigma import org.bouncycastle.math.ec.ECPoint import special.collection.Coll +// TODO refactor: this class should be removed before v5.0 abstract class TestGroupElement(private[sigma] val value: ECPoint) extends GroupElement { - val dsl: TestSigmaDslBuilder = new TestSigmaDslBuilder + def dsl: TestSigmaDslBuilder override def toString: String = s"GroupElement(${Extensions.showECPoint(value)})" diff --git a/sigmastate/src/main/scala/org/ergoplatform/ErgoAddress.scala b/sigmastate/src/main/scala/org/ergoplatform/ErgoAddress.scala index 6adc685cc6..1116b9fbf4 100644 --- a/sigmastate/src/main/scala/org/ergoplatform/ErgoAddress.scala +++ b/sigmastate/src/main/scala/org/ergoplatform/ErgoAddress.scala @@ -262,7 +262,7 @@ case class 
ErgoAddressEncoder(networkPrefix: NetworkPrefix) { import ErgoAddressEncoder._ /** This value is be used implicitly in the methods below. */ - implicit private val ergoAddressEncoder: ErgoAddressEncoder = this + implicit private def ergoAddressEncoder: ErgoAddressEncoder = this /** Converts the given [[ErgoAddress]] to Base58 string. */ def toString(address: ErgoAddress): String = { diff --git a/sigmastate/src/main/scala/org/ergoplatform/ErgoLikeContext.scala b/sigmastate/src/main/scala/org/ergoplatform/ErgoLikeContext.scala index 438978fd34..6f5544e847 100644 --- a/sigmastate/src/main/scala/org/ergoplatform/ErgoLikeContext.scala +++ b/sigmastate/src/main/scala/org/ergoplatform/ErgoLikeContext.scala @@ -118,17 +118,16 @@ class ErgoLikeContext(val lastBlockUtxoRoot: AvlTreeData, dataBoxes, boxesToSpend, newSpendingTransaction, selfIndex, extension, validationSettings, costLimit, initCost) - override def toSigmaContext(IR: Evaluation, isCost: Boolean, extensions: Map[Byte, AnyValue] = Map()): sigma.Context = { - implicit val IRForBox: Evaluation = IR + override def toSigmaContext(isCost: Boolean, extensions: Map[Byte, AnyValue] = Map()): sigma.Context = { import Evaluation._ - def contextVars(m: Map[Byte, AnyValue])(implicit IR: Evaluation): Coll[AnyValue] = { + def contextVars(m: Map[Byte, AnyValue]): Coll[AnyValue] = { val maxKey = if (m.keys.isEmpty) 0 else m.keys.max val res = new Array[AnyValue](maxKey + 1) for ((id, v) <- m) { res(id) = v } - IR.sigmaDslBuilderValue.Colls.fromArray(res) + CostingSigmaDslBuilder.Colls.fromArray(res) } val dataInputs = this.dataBoxes.toArray.map(_.toTestBox(isCost)).toColl diff --git a/sigmastate/src/main/scala/sigmastate/Values.scala b/sigmastate/src/main/scala/sigmastate/Values.scala index cb029de642..b2a0409761 100644 --- a/sigmastate/src/main/scala/sigmastate/Values.scala +++ b/sigmastate/src/main/scala/sigmastate/Values.scala @@ -37,6 +37,7 @@ import sigmastate.lang.SourceContext import special.collection.Coll import 
scala.collection.mutable +import scala.collection.mutable.ArrayBuffer object Values { @@ -114,6 +115,12 @@ object Values { } def notSupportedError(v: SValue, opName: String) = throw new IllegalArgumentException(s"Method $opName is not supported for node $v") + + /** Immutable empty array of values. Can be used to avoid allocation. */ + val EmptyArray = Array.empty[SValue] + + /** Immutable empty Seq of values. Can be used to avoid allocation. */ + val EmptySeq: IndexedSeq[SValue] = EmptyArray } trait ValueCompanion extends SigmaNodeCompanion { @@ -183,7 +190,17 @@ object Values { object Constant extends ValueCompanion { override def opCode: OpCode = ConstantCode + + /** Immutable empty array, can be used to save allocations in many places. */ + val EmptyArray = Array.empty[Constant[SType]] + + /** Immutable empty IndexedSeq, can be used to save allocations in many places. */ + val EmptySeq: IndexedSeq[Constant[SType]] = Array.empty[Constant[SType]] + + /** Helper factory method. */ def apply[S <: SType](value: S#WrappedType, tpe: S): Constant[S] = ConstantNode(value, tpe) + + /** Recognizer of Constant tree nodes used in patterns. */ def unapply[S <: SType](v: EvaluatedValue[S]): Option[(S#WrappedType, S)] = v match { case ConstantNode(value, tpe) => Some((value, tpe)) case _ => None @@ -672,8 +689,12 @@ object Values { // NOTE, the assert below should be commented before production release. // Is it there for debuging only, basically to catch call stacks where the fancy types may // occasionally be used. 
-// assert(items.isInstanceOf[mutable.WrappedArray[_]] || items.isInstanceOf[mutable.IndexedSeq[_]], +// assert( +// items.isInstanceOf[mutable.WrappedArray[_]] || +// items.isInstanceOf[ArrayBuffer[_]] || +// items.isInstanceOf[mutable.ArraySeq[_]], // s"Invalid types of items ${items.getClass}") + private val isBooleanConstants = elementType == SBoolean && items.forall(_.isInstanceOf[Constant[_]]) override def companion = if (isBooleanConstants) ConcreteCollectionBooleanConstant @@ -751,6 +772,13 @@ object Values { def rhs: SValue def isValDef: Boolean } + object BlockItem { + /** Immutable empty array, can be used to save allocations in many places. */ + val EmptyArray = Array.empty[BlockItem] + + /** Immutable empty IndexedSeq to save allocations in many places. */ + val EmptySeq: IndexedSeq[BlockItem] = EmptyArray + } /** IR node for let-bound expressions `let x = rhs` which is ValDef, or `let f[T] = rhs` which is FunDef. * These nodes are used to represent ErgoTrees after common sub-expression elimination. 
@@ -810,7 +838,14 @@ object Values { */ case class FuncValue(args: IndexedSeq[(Int,SType)], body: Value[SType]) extends NotReadyValue[SFunc] { override def companion = FuncValue - lazy val tpe: SFunc = SFunc(args.toArray.map(_._2), body.tpe) + lazy val tpe: SFunc = { + val nArgs = args.length + val argTypes = new Array[SType](nArgs) + cfor(0)(_ < nArgs, _ + 1) { i => + argTypes(i) = args(i)._2 + } + SFunc(argTypes, body.tpe) + } /** This is not used as operation, but rather to form a program structure */ override def opType: SFunc = SFunc(mutable.WrappedArray.empty, tpe) } diff --git a/sigmastate/src/main/scala/sigmastate/eval/BigIntegerOps.scala b/sigmastate/src/main/scala/sigmastate/eval/BigIntegerOps.scala index 547092bad6..799382cf10 100644 --- a/sigmastate/src/main/scala/sigmastate/eval/BigIntegerOps.scala +++ b/sigmastate/src/main/scala/sigmastate/eval/BigIntegerOps.scala @@ -53,6 +53,17 @@ object NumericOps { def toFloat(x: BigInt): Float = CostingSigmaDslBuilder.toBigInteger(x).floatValue() def toDouble(x: BigInt): Double = CostingSigmaDslBuilder.toBigInteger(x).doubleValue() } + + /** The instance of Integral for BigInt. + * + * Note: ExactIntegral is not defined for [[special.sigma.BigInt]]. + * This is because arithmetic BigInt operations are handled specially + * (see `case op: ArithOp[t] if op.tpe == SBigInt =>` in RuntimeCosting.scala). + * As result [[scalan.primitives.UnBinOps.ApplyBinOp]] nodes are not created for BigInt + * operations, and hence operation descriptors such as + * [[scalan.primitives.NumericOps.IntegralDivide]] and + * [[scalan.primitives.NumericOps.IntegralMod]] are not used for BigInt. 
+ */ implicit object BigIntIsIntegral extends BigIntIsIntegral with OrderingOps.BigIntOrdering implicit object BigIntIsExactNumeric extends ExactNumeric[BigInt] { @@ -62,13 +73,6 @@ object NumericOps { override def times(x: BigInt, y: BigInt): BigInt = n.times(x, y) } - implicit object BigIntIsExactIntegral extends ExactIntegral[BigInt] { - val n = BigIntIsIntegral - override def plus(x: BigInt, y: BigInt): BigInt = n.plus(x, y) - override def minus(x: BigInt, y: BigInt): BigInt = n.minus(x, y) - override def times(x: BigInt, y: BigInt): BigInt = n.times(x, y) - } - implicit object BigIntIsExactOrdering extends ExactOrderingImpl[BigInt](BigIntIsIntegral) } diff --git a/sigmastate/src/main/scala/sigmastate/eval/Evaluation.scala b/sigmastate/src/main/scala/sigmastate/eval/Evaluation.scala index c1a6fe3d54..0ce67b66ff 100644 --- a/sigmastate/src/main/scala/sigmastate/eval/Evaluation.scala +++ b/sigmastate/src/main/scala/sigmastate/eval/Evaluation.scala @@ -25,6 +25,7 @@ import special.Types._ import scala.collection.immutable.HashSet import scala.collection.mutable import scala.collection.mutable.ArrayBuffer +import spire.syntax.all.cfor /** This is a slice in IRContext cake which implements evaluation of graphs. 
*/ @@ -81,99 +82,17 @@ trait Evaluation extends RuntimeCosting { IR: IRContext => private val SPCM = WSpecialPredefCompanionMethods private val MBM = MonoidBuilderMethods - private val _allowedOpCodesInCosting: HashSet[OpCodeExtra] = HashSet[OpCode]( - AppendCode, - ByIndexCode, - ConstantCode, - DivisionCode, - DowncastCode, - ExtractBytesWithNoRefCode, - ExtractRegisterAs, - ExtractScriptBytesCode, - FoldCode, - FuncApplyCode, - FuncValueCode, - GetVarCode, - InputsCode, - LastBlockUtxoRootHashCode, - MapCollectionCode, - FlatMapCollectionCode, - MaxCode, - MethodCallCode, - MinCode, - MinusCode, - ModuloCode, - MultiplyCode, - OptionGetCode, - OptionGetOrElseCode, - OptionIsDefinedCode, - OutputsCode, - PlusCode, - SelectFieldCode, - SelfCode, - SigmaPropBytesCode, - SizeOfCode, - SliceCode, - TupleCode, - UpcastCode - ).map(toExtra) ++ HashSet[OpCodeExtra]( - OpCostCode, - PerKbCostOfCode, - CastCode, - IntPlusMonoidCode, - ThunkDefCode, - ThunkForceCode, - SCMInputsCode, - SCMOutputsCode, - SCMDataInputsCode, - SCMSelfBoxCode, - SCMLastBlockUtxoRootHashCode, - SCMHeadersCode, - SCMPreHeaderCode, - SCMGetVarCode, - SBMPropositionBytesCode, - SBMBytesCode, - SBMBytesWithoutRefCode, - SBMRegistersCode, - SBMGetRegCode, - SBMTokensCode, - SSPMPropBytesCode, - SAVMTValCode, - SAVMValueSizeCode, - SizeMDataSizeCode, - SPairLCode, - SPairRCode, - SCollMSizesCode, - SOptMSizeOptCode, - SFuncMSizeEnvCode, - CSizePairCtorCode, - CSizeFuncCtorCode, - CSizeOptionCtorCode, - CSizeCollCtorCode, - CSizeBoxCtorCode, - CSizeContextCtorCode, - CSizeAnyValueCtorCode, - CReplCollCtorCode, - PairOfColsCtorCode, - CollMSumCode, - CBMReplicateCode, - CBMFromItemsCode, - CostOfCode, - UOSizeOfCode, - SPCMSomeCode - ) - /** Returns a set of opCodeEx values (extended op codes) which are allowed in cost function. * This may include both ErgoTree codes (from OpCodes) and also additional non-ErgoTree codes * from OpCodesExtra. 
* Any IR graph node can be uniquely assigned to extended op code value * from OpCodes + OpCodesExtra combined range. (See getOpCodeEx) */ - protected def allowedOpCodesInCosting: HashSet[OpCodeExtra] = _allowedOpCodesInCosting + protected def allowedOpCodesInCosting: HashSet[OpCodeExtra] = Evaluation.AllowedOpCodesInCosting + /** Checks the given opCode belong to an operation allowed in cost function. */ def isAllowedOpCodeInCosting(opCode: OpCodeExtra): Boolean = allowedOpCodesInCosting.contains(opCode) - /** Returns extended op code assigned to the given IR graph node. - */ + /** Returns extended op code assigned to the given IR graph node. */ def getOpCodeEx(d: Def[_]): OpCodeExtra = d match { case _: OpCost => OpCostCode case _: PerKbCostOf => PerKbCostOfCode @@ -451,8 +370,6 @@ trait Evaluation extends RuntimeCosting { IR: IRContext => case _ => error(s"Cannot find value in environment for $s (dataEnv = $dataEnv)") } - def msgCostLimitError(cost: Long, limit: Long) = s"Estimated execution cost $cost exceeds the limit $limit" - /** Incapsulate simple monotonic (add only) counter with reset. 
*/ class CostCounter(val initialCost: Int) { private var _currentCost: Int = initialCost @@ -558,7 +475,7 @@ trait Evaluation extends RuntimeCosting { IR: IRContext => // if (cost < limit) // println(s"FAIL FAST in loop: $accumulatedCost > $limit") // TODO cover with tests - throw new CostLimitException(accumulatedCost, msgCostLimitError(accumulatedCost, limit), None) + throw new CostLimitException(accumulatedCost, Evaluation.msgCostLimitError(accumulatedCost, limit), None) } } @@ -635,11 +552,22 @@ trait Evaluation extends RuntimeCosting { IR: IRContext => case wc: LiftedConst[_,_] => out(wc.constValue) - case _: SigmaDslBuilder | _: CollBuilder | _: CostedBuilder | - _: WSpecialPredefCompanion | - _: IntPlusMonoid | _: LongPlusMonoid | - MBM.intPlusMonoid(_) | MBM.longPlusMonoid(_) => // TODO no HF proof - out(dataEnv.getOrElse(sym, !!!(s"Cannot resolve companion instance for $sym -> ${sym.node}"))) + case _: IntPlusMonoid | MBM.intPlusMonoid(_) => + // always return the same value since monoids are singletons + out(monoidBuilderValue.intPlusMonoid) + + case _: LongPlusMonoid | MBM.longPlusMonoid(_) => + // always return the same value since monoids are singletons + out(monoidBuilderValue.longPlusMonoid) + + case _: SigmaDslBuilder => + // always return the same value since SigmaDslBuilder is singleton + out(sigmaDslBuilderValue) + + case _: CollBuilder | _: CostedBuilder | _: WSpecialPredefCompanion => + out(dataEnv.getOrElse(sym, { + !!!(s"Cannot resolve companion instance for $sym -> ${sym.node}") + })) case SigmaM.isValid(In(prop: AnyRef)) => out(prop) @@ -852,6 +780,91 @@ trait Evaluation extends RuntimeCosting { IR: IRContext => object Evaluation { import special.sigma._ import special.collection._ + import OpCodes._ + + val AllowedOpCodesInCosting: HashSet[OpCodeExtra] = HashSet[OpCode]( + AppendCode, + ByIndexCode, + ConstantCode, + DivisionCode, + DowncastCode, + ExtractBytesWithNoRefCode, + ExtractRegisterAs, + ExtractScriptBytesCode, + FoldCode, + 
FuncApplyCode, + FuncValueCode, + GetVarCode, + InputsCode, + LastBlockUtxoRootHashCode, + MapCollectionCode, + FlatMapCollectionCode, + MaxCode, + MethodCallCode, + MinCode, + MinusCode, + ModuloCode, + MultiplyCode, + OptionGetCode, + OptionGetOrElseCode, + OptionIsDefinedCode, + OutputsCode, + PlusCode, + SelectFieldCode, + SelfCode, + SigmaPropBytesCode, + SizeOfCode, + SliceCode, + TupleCode, + UpcastCode + ).map(toExtra) ++ HashSet[OpCodeExtra]( + OpCostCode, + PerKbCostOfCode, + CastCode, + IntPlusMonoidCode, + ThunkDefCode, + ThunkForceCode, + SCMInputsCode, + SCMOutputsCode, + SCMDataInputsCode, + SCMSelfBoxCode, + SCMLastBlockUtxoRootHashCode, + SCMHeadersCode, + SCMPreHeaderCode, + SCMGetVarCode, + SBMPropositionBytesCode, + SBMBytesCode, + SBMBytesWithoutRefCode, + SBMRegistersCode, + SBMGetRegCode, + SBMTokensCode, + SSPMPropBytesCode, + SAVMTValCode, + SAVMValueSizeCode, + SizeMDataSizeCode, + SPairLCode, + SPairRCode, + SCollMSizesCode, + SOptMSizeOptCode, + SFuncMSizeEnvCode, + CSizePairCtorCode, + CSizeFuncCtorCode, + CSizeOptionCtorCode, + CSizeCollCtorCode, + CSizeBoxCtorCode, + CSizeContextCtorCode, + CSizeAnyValueCtorCode, + CReplCollCtorCode, + PairOfColsCtorCode, + CollMSumCode, + CBMReplicateCode, + CBMFromItemsCode, + CostOfCode, + UOSizeOfCode, + SPCMSomeCode + ) + + def msgCostLimitError(cost: Long, limit: Long) = s"Estimated execution cost $cost exceeds the limit $limit" /** Transforms a serializable ErgoTree type descriptor to the corresponding RType descriptor of SigmaDsl, * which is used during evaluation. 
@@ -874,15 +887,25 @@ object Evaluation { case SGroupElement => GroupElementRType case SAvlTree => AvlTreeRType case SSigmaProp => SigmaPropRType - case STuple(Seq(tpeA, tpeB)) => + case tup: STuple if tup.items.length == 2 => + val tpeA = tup.items(0) + val tpeB = tup.items(1) pairRType(stypeToRType(tpeA), stypeToRType(tpeB)) case STuple(items) => val types = items.toArray - tupleRType(types.map(t => stypeToRType(t).asInstanceOf[SomeType])) + val len = types.length + val rtypes = new Array[SomeType](len) + cfor(0)(_ < len, _ + 1) { i => + rtypes(i) = stypeToRType(types(i)).asInstanceOf[SomeType] + } + tupleRType(rtypes) case c: SCollectionType[a] => collRType(stypeToRType(c.elemType)) case o: SOption[a] => optionRType(stypeToRType(o.elemType)) - case SFunc(Seq(tpeArg), tpeRange, Nil) => funcRType(stypeToRType(tpeArg), stypeToRType(tpeRange)) - case _ => sys.error(s"Don't know how to convert SType $t to RType") + case SFunc(args, tpeRange, Nil) if args.length == 1 => + val tpeArg = args(0) + funcRType(stypeToRType(tpeArg), stypeToRType(tpeRange)) + case _ => + sys.error(s"Don't know how to convert SType $t to RType") }).asInstanceOf[RType[T#WrappedType]] /** Transforms RType descriptor of SigmaDsl, which is used during evaluation, diff --git a/sigmastate/src/main/scala/sigmastate/eval/IRContext.scala b/sigmastate/src/main/scala/sigmastate/eval/IRContext.scala index 3f5b27b762..9d24faec88 100644 --- a/sigmastate/src/main/scala/sigmastate/eval/IRContext.scala +++ b/sigmastate/src/main/scala/sigmastate/eval/IRContext.scala @@ -131,7 +131,7 @@ trait IRContext extends Evaluation with TreeBuilding { val totalCost = JMath.addExact(initCost, scaledCost) if (totalCost > maxCost) { // TODO cover with tests - throw new CostLimitException(totalCost, msgCostLimitError(totalCost, maxCost), None) + throw new CostLimitException(totalCost, Evaluation.msgCostLimitError(totalCost, maxCost), None) } totalCost.toInt } diff --git 
a/sigmastate/src/main/scala/sigmastate/eval/RuntimeCosting.scala b/sigmastate/src/main/scala/sigmastate/eval/RuntimeCosting.scala index 9cf0178843..02124bac5d 100644 --- a/sigmastate/src/main/scala/sigmastate/eval/RuntimeCosting.scala +++ b/sigmastate/src/main/scala/sigmastate/eval/RuntimeCosting.scala @@ -903,8 +903,7 @@ trait RuntimeCosting extends CostingRules { IR: IRContext => (ByteElement, ByteIsExactIntegral), (ShortElement, ShortIsExactIntegral), (IntElement, IntIsExactIntegral), - (LongElement, LongIsExactIntegral), - (bigIntElement, BigIntIsExactIntegral) + (LongElement, LongIsExactIntegral) ) private lazy val elemToExactOrderingMap = Map[Elem[_], ExactOrdering[_]]( (ByteElement, ByteIsExactOrdering), @@ -1232,7 +1231,11 @@ trait RuntimeCosting extends CostingRules { IR: IRContext => case BlockValue(binds, res) => var curEnv = env - for (vd @ ValDef(n, _, b) <- binds) { + val len = binds.length + cfor(0)(_ < len, _ + 1) { i => + val vd = binds(i).asInstanceOf[ValDef] + val n = vd.id + val b = vd.rhs if (curEnv.contains(n)) error(s"Variable $n already defined ($n = ${curEnv(n)}", vd.sourceContext.toOption) val bC = evalNode(ctx, curEnv, b) curEnv = curEnv + (n -> bC) @@ -1601,10 +1604,17 @@ trait RuntimeCosting extends CostingRules { IR: IRContext => RCCostedPrim(v, opCost(v, Array(lC.cost, rC.cost), costOf(node)), SizeBigInt) case OR(input) => input match { - case ConcreteCollection(items, tpe) => - val itemsC = items.map(item => eval(adaptSigmaBoolean(item))) - val res = sigmaDslBuilder.anyOf(colBuilder.fromItems(itemsC.map(_.value): _*)) - val costs = itemsC.map(_.cost) + case ConcreteCollection(items, _) => + val len = items.length + val values = new Array[Ref[Boolean]](len) + val costs = new Array[Ref[Int]](len) + cfor(0)(_ < len, _ + 1) { i => + val item = items(i) + val itemC = eval(adaptSigmaBoolean(item)) + values(i) = itemC.value + costs(i) = itemC.cost + } + val res = sigmaDslBuilder.anyOf(colBuilder.fromItems(values: _*)) val nOps = 
costs.length - 1 val cost = opCost(res, costs, perItemCostOf(node, nOps)) withConstantSize(res, cost) @@ -1617,10 +1627,17 @@ trait RuntimeCosting extends CostingRules { IR: IRContext => } case AND(input) => input match { - case ConcreteCollection(items, tpe) => - val itemsC = items.map(item => eval(adaptSigmaBoolean(item))) - val res = sigmaDslBuilder.allOf(colBuilder.fromItems(itemsC.map(_.value): _*)) - val costs = itemsC.map(_.cost) + case ConcreteCollection(items, _) => + val len = items.length + val values = new Array[Ref[Boolean]](len) + val costs = new Array[Ref[Int]](len) + cfor(0)(_ < len, _ + 1) { i => + val item = items(i) + val itemC = eval(adaptSigmaBoolean(item)) + values(i) = itemC.value + costs(i) = itemC.cost + } + val res = sigmaDslBuilder.allOf(colBuilder.fromItems(values: _*)) val nOps = costs.length - 1 val cost = opCost(res, costs, perItemCostOf(node, nOps)) withConstantSize(res, cost) @@ -1633,10 +1650,17 @@ trait RuntimeCosting extends CostingRules { IR: IRContext => } case XorOf(input) => input match { - case ConcreteCollection(items, tpe) => - val itemsC = items.map(item => eval(item)) - val res = sigmaDslBuilder.xorOf(colBuilder.fromItems(itemsC.map(_.value): _*)) - val costs = itemsC.map(_.cost) + case ConcreteCollection(items, _) => + val len = items.length + val values = new Array[Ref[Boolean]](len) + val costs = new Array[Ref[Int]](len) + cfor(0)(_ < len, _ + 1) { i => + val item = items(i) + val itemC = eval(adaptSigmaBoolean(item)) + values(i) = itemC.value + costs(i) = itemC.cost + } + val res = sigmaDslBuilder.xorOf(colBuilder.fromItems(values: _*)) val nOps = costs.length - 1 val cost = opCost(res, costs, perItemCostOf(node, nOps)) withConstantSize(res, cost) @@ -1680,16 +1704,30 @@ trait RuntimeCosting extends CostingRules { IR: IRContext => } case SigmaAnd(items) => - val itemsC = items.map(eval) - val res = sigmaDslBuilder.allZK(colBuilder.fromItems(itemsC.map(s => asRep[SigmaProp](s.value)): _*)) - val costs = 
itemsC.map(_.cost) + val len = items.length + val values = new Array[Ref[SigmaProp]](len) + val costs = new Array[Ref[Int]](len) + cfor(0)(_ < len, _ + 1) { i => + val item = items(i) + val itemC = eval(item) + values(i) = asRep[SigmaProp](itemC.value) + costs(i) = itemC.cost + } + val res = sigmaDslBuilder.allZK(colBuilder.fromItems(values: _*)) val cost = opCost(res, costs, perItemCostOf(node, costs.length)) RCCostedPrim(res, cost, SizeSigmaProposition) case SigmaOr(items) => - val itemsC = items.map(eval) - val res = sigmaDslBuilder.anyZK(colBuilder.fromItems(itemsC.map(s => asRep[SigmaProp](s.value)): _*)) - val costs = itemsC.map(_.cost) + val len = items.length + val values = new Array[Ref[SigmaProp]](len) + val costs = new Array[Ref[Int]](len) + cfor(0)(_ < len, _ + 1) { i => + val item = items(i) + val itemC = eval(item) + values(i) = asRep[SigmaProp](itemC.value) + costs(i) = itemC.cost + } + val res = sigmaDslBuilder.anyZK(colBuilder.fromItems(values: _*)) val cost = opCost(res, costs, perItemCostOf(node, costs.length)) RCCostedPrim(res, cost, SizeSigmaProposition) @@ -1802,8 +1840,29 @@ trait RuntimeCosting extends CostingRules { IR: IRContext => // fallback rule for MethodCall, should be the last case in the list case Terms.MethodCall(obj, method, args, typeSubst) if method.objType.coster.isDefined => val objC = eval(obj) - val argsC = args.map(eval) - val elems = typeSubst.values.toSeq.map(tpe => liftElem(stypeToElem(tpe).asInstanceOf[Elem[Any]])) + val argsC: Seq[RCosted[SType#WrappedType]] = + if (args.isEmpty) + EmptySeqOfSym.asInstanceOf[Seq[RCosted[SType#WrappedType]]] + else { + val len = args.length + val res = new Array[RCosted[SType#WrappedType]](len) + cfor(0)(_ < len, _ + 1) { i => + res(i) = eval(args(i)) + } + res + } + val elems: Seq[Sym] = + if (typeSubst.isEmpty) + EmptySeqOfSym + else { + val ts = typeSubst.values.toArray + val len = ts.length + val res = new Array[Sym](len) + cfor(0)(_ < len, _ + 1) { i => + res(i) = 
liftElem(stypeToElem(ts(i)).asInstanceOf[Elem[Any]]) + } + res + } method.objType.coster.get(IR)(objC, method, argsC, elems) case _ => diff --git a/sigmastate/src/main/scala/sigmastate/eval/TreeBuilding.scala b/sigmastate/src/main/scala/sigmastate/eval/TreeBuilding.scala index 92e4018a96..0e8253391a 100644 --- a/sigmastate/src/main/scala/sigmastate/eval/TreeBuilding.scala +++ b/sigmastate/src/main/scala/sigmastate/eval/TreeBuilding.scala @@ -1,7 +1,7 @@ package sigmastate.eval -import sigmastate.Values.{BlockValue, BoolValue, Constant, ConstantNode, SValue, SigmaPropConstant, ValDef, ValUse, Value} +import sigmastate.Values.{BlockItem, BlockValue, BoolValue, Constant, ConstantNode, SValue, SigmaPropConstant, ValDef, ValUse, Value} import org.ergoplatform._ import org.ergoplatform.{Height, Inputs, Outputs, Self} @@ -254,7 +254,7 @@ trait TreeBuilding extends RuntimeCosting { IR: IRContext => case (mth @ SCollection.ZipMethod, Seq(coll)) => val typeSubst = Map(SCollection.tOV -> coll.asCollection[SType].tpe.elemType) typeSubst - case (mth, _) => SigmaTyper.emptySubst + case (mth, _) => SigmaTyper.EmptySubst } val specMethod = method.withConcreteTypes(typeSubst + (SCollection.tIV -> colTpe.elemType)) builder.mkMethodCall(col, specMethod, args.toIndexedSeq, Map()) @@ -422,7 +422,7 @@ trait TreeBuilding extends RuntimeCosting { IR: IRContext => } val Seq(root) = subG.roots val rhs = buildValue(ctx, mainG, curEnv, root, curId, constantsProcessing) - val res = if (valdefs.nonEmpty) BlockValue(valdefs.toIndexedSeq, rhs) else rhs + val res = if (valdefs.nonEmpty) BlockValue(valdefs.toArray[BlockItem], rhs) else rhs res } diff --git a/sigmastate/src/main/scala/sigmastate/interpreter/Interpreter.scala b/sigmastate/src/main/scala/sigmastate/interpreter/Interpreter.scala index 1c78a7f82f..619505720a 100644 --- a/sigmastate/src/main/scala/sigmastate/interpreter/Interpreter.scala +++ b/sigmastate/src/main/scala/sigmastate/interpreter/Interpreter.scala @@ -10,7 +10,7 @@ import 
sigmastate.basics.DLogProtocol.{DLogInteractiveProver, FirstDLogProverMes import scorex.util.ScorexLogging import sigmastate.SCollection.SByteArray import sigmastate.Values._ -import sigmastate.eval.{IRContext, Sized} +import sigmastate.eval.{IRContext, Sized, Evaluation} import sigmastate.lang.Terms.ValueOps import sigmastate.basics._ import sigmastate.interpreter.Interpreter.{ScriptEnv, VerificationResult} @@ -53,7 +53,7 @@ trait Interpreter extends ScorexLogging { val currCost = JMath.addExact(context.initCost, scriptComplexity) val remainingLimit = context.costLimit - currCost if (remainingLimit <= 0) - throw new CostLimitException(currCost, msgCostLimitError(currCost, context.costLimit), None) // TODO cover with tests + throw new CostLimitException(currCost, Evaluation.msgCostLimitError(currCost, context.costLimit), None) // TODO cover with tests val ctx1 = context.withInitCost(currCost).asInstanceOf[CTX] (ctx1, script) @@ -115,7 +115,7 @@ trait Interpreter extends ScorexLogging { (res, currContext.value) } - def calcResult(context: special.sigma.Context, calcF: Ref[IR.Context => Any]): special.sigma.SigmaProp = { + private def calcResult(context: special.sigma.Context, calcF: Ref[IR.Context => Any]): special.sigma.SigmaProp = { import IR._ import Context._ import SigmaProp._ @@ -156,7 +156,7 @@ trait Interpreter extends ScorexLogging { CheckCostFunc(IR)(asRep[Any => Int](costF)) - val costingCtx = context.toSigmaContext(IR, isCost = true) + val costingCtx = context.toSigmaContext(isCost = true) val estimatedCost = IR.checkCostWithContext(costingCtx, costF, maxCost, initCost).getOrThrow IR.onEstimatedCost(env, exp, costingRes, costingCtx, estimatedCost) @@ -164,7 +164,7 @@ trait Interpreter extends ScorexLogging { // check calc val calcF = costingRes.calcF CheckCalcFunc(IR)(calcF) - val calcCtx = context.toSigmaContext(IR, isCost = false) + val calcCtx = context.toSigmaContext(isCost = false) val res = calcResult(calcCtx, calcF) SigmaDsl.toSigmaBoolean(res) -> 
estimatedCost } @@ -232,7 +232,7 @@ trait Interpreter extends ScorexLogging { val initCost = JMath.addExact(ergoTree.complexity.toLong, context.initCost) val remainingLimit = context.costLimit - initCost if (remainingLimit <= 0) - throw new CostLimitException(initCost, msgCostLimitError(initCost, context.costLimit), None) // TODO cover with tests + throw new CostLimitException(initCost, Evaluation.msgCostLimitError(initCost, context.costLimit), None) // TODO cover with tests val contextWithCost = context.withInitCost(initCost).asInstanceOf[CTX] diff --git a/sigmastate/src/main/scala/sigmastate/interpreter/InterpreterContext.scala b/sigmastate/src/main/scala/sigmastate/interpreter/InterpreterContext.scala index ea2470c0e7..5965c533e1 100644 --- a/sigmastate/src/main/scala/sigmastate/interpreter/InterpreterContext.scala +++ b/sigmastate/src/main/scala/sigmastate/interpreter/InterpreterContext.scala @@ -64,6 +64,12 @@ trait InterpreterContext { /** Creates a new instance with given validation settings. */ def withValidationSettings(newVs: SigmaValidationSettings): InterpreterContext - /** Creates `special.sigma.Context` instance based on this context. */ - def toSigmaContext(IR: Evaluation, isCost: Boolean, extensions: Map[Byte, AnyValue] = Map()): sigma.Context + /** Creates `special.sigma.Context` instance based on this context. The created instance + * contains all data represented using types from [[special.sigma]] package. + * These types are used internally by ErgoTree interpreter. + * Thus, this method performs transformation from Ergo to internal Sigma representation + * of all context data. 
+ * @see sigmastate.eval.Evaluation + */ + def toSigmaContext(isCost: Boolean, extensions: Map[Byte, AnyValue] = Map()): sigma.Context } diff --git a/sigmastate/src/main/scala/sigmastate/lang/SigmaTyper.scala b/sigmastate/src/main/scala/sigmastate/lang/SigmaTyper.scala index e1b6c1df03..79f24ab509 100644 --- a/sigmastate/src/main/scala/sigmastate/lang/SigmaTyper.scala +++ b/sigmastate/src/main/scala/sigmastate/lang/SigmaTyper.scala @@ -36,9 +36,9 @@ class SigmaTyper(val builder: SigmaBuilder, predefFuncRegistry: PredefinedFuncRe val global = Global.withPropagatedSrcCtx(srcCtx) val node = for { pf <- method.irInfo.irBuilder - res <- pf.lift((builder, global, method, args, emptySubst)) + res <- pf.lift((builder, global, method, args, EmptySubst)) } yield res - node.getOrElse(mkMethodCall(global, method, args, emptySubst).withPropagatedSrcCtx(srcCtx)) + node.getOrElse(mkMethodCall(global, method, args, EmptySubst).withPropagatedSrcCtx(srcCtx)) } /** * Rewrite tree to typed tree. Checks constituent names and types. Uses @@ -286,7 +286,7 @@ class SigmaTyper(val builder: SigmaBuilder, predefFuncRegistry: PredefinedFuncRe case None => error(s"Invalid argument type of method call $mc : expected ${sfunc.tDom}; actual: $actualTypes", mc.sourceContext) } - case _ => emptySubst + case _ => EmptySubst } method.irInfo.irBuilder.flatMap(_.lift(builder, newObj, method, newArgs, typeSubst)) .getOrElse(mkMethodCall(newObj, method, newArgs, typeSubst)) @@ -614,7 +614,7 @@ class SigmaTyper(val builder: SigmaBuilder, predefFuncRegistry: PredefinedFuncRe object SigmaTyper { type STypeSubst = Map[STypeVar, SType] - val emptySubst = Map.empty[STypeVar, SType] + val EmptySubst = Map.empty[STypeVar, SType] /** Performs pairwise type unification making sure each type variable is equally * substituted in all items. 
*/ @@ -623,7 +623,7 @@ object SigmaTyper { val itemsUni = (items1, items2).zipped.map((t1, t2) => unifyTypes(t1,t2)) if (itemsUni.forall(_.isDefined)) { // merge substitutions making sure the same id is equally substituted in all items - val merged = itemsUni.foldLeft(emptySubst)((acc, subst) => { + val merged = itemsUni.foldLeft(EmptySubst)((acc, subst) => { var res = acc for ((id, t) <- subst.get) { if (res.contains(id) && res(id) != t) return None @@ -636,7 +636,7 @@ object SigmaTyper { None } - private val unifiedWithoutSubst = Some(emptySubst) + private val unifiedWithoutSubst = Some(EmptySubst) /** Finds a substitution `subst` of type variables such that unifyTypes(applySubst(t1, subst), t2) shouldBe Some(emptySubst) */ def unifyTypes(t1: SType, t2: SType): Option[STypeSubst] = (t1, t2) match { diff --git a/sigmastate/src/main/scala/sigmastate/lang/Terms.scala b/sigmastate/src/main/scala/sigmastate/lang/Terms.scala index 1e4d86af83..2b4bc61f11 100644 --- a/sigmastate/src/main/scala/sigmastate/lang/Terms.scala +++ b/sigmastate/src/main/scala/sigmastate/lang/Terms.scala @@ -11,6 +11,8 @@ import sigmastate.serialization.OpCodes.OpCode import sigmastate.lang.TransformingSigmaBuilder._ import scala.language.implicitConversions +import scala.collection.mutable.WrappedArray +import spire.syntax.all.cfor object Terms { @@ -99,7 +101,7 @@ object Terms { * compilation environment value. */ case class Ident(name: String, tpe: SType = NoType) extends Value[SType] { override def companion = Ident - override def opType: SFunc = SFunc(Vector(), tpe) + override def opType: SFunc = SFunc(WrappedArray.empty, tpe) } object Ident extends ValueCompanion { override def opCode: OpCode = OpCodes.Undefined @@ -107,6 +109,10 @@ object Terms { } // TODO HF: move to sigmastate.Values + /** ErgoTree node which represents application of function `func` to the given arguments. 
+ * @param func expression which evaluates to a function + * @param args arguments of the function application + */ case class Apply(func: Value[SType], args: IndexedSeq[Value[SType]]) extends Value[SType] { override def companion = Apply override lazy val tpe: SType = func.tpe match { @@ -114,7 +120,15 @@ object Terms { case tColl: SCollectionType[_] => tColl.elemType case _ => NoType } - override def opType: SFunc = SFunc(Vector(func.tpe +: args.map(_.tpe):_*), tpe) + override lazy val opType: SFunc = { + val nArgs = args.length + val argTypes = new Array[SType](nArgs + 1) + argTypes(0) = func.tpe + cfor(0)(_ < nArgs, _ + 1) { i => + argTypes(i + 1) = args(i).tpe + } + SFunc(argTypes, tpe) + } } object Apply extends ValueCompanion { override def opCode: OpCode = OpCodes.FuncApplyCode diff --git a/sigmastate/src/main/scala/sigmastate/serialization/BlockValueSerializer.scala b/sigmastate/src/main/scala/sigmastate/serialization/BlockValueSerializer.scala index b595af5740..04b05ea70d 100644 --- a/sigmastate/src/main/scala/sigmastate/serialization/BlockValueSerializer.scala +++ b/sigmastate/src/main/scala/sigmastate/serialization/BlockValueSerializer.scala @@ -6,6 +6,7 @@ import scorex.util.Extensions._ import sigmastate.utils.{SigmaByteReader, SigmaByteWriter} import ValueSerializer._ import sigmastate.utils.SigmaByteWriter.{Vlq, U, DataInfo} +import spire.syntax.all.cfor case class BlockValueSerializer(cons: (IndexedSeq[BlockItem], Value[SType]) => Value[SType]) extends ValueSerializer[BlockValue] { @@ -24,7 +25,16 @@ case class BlockValueSerializer(cons: (IndexedSeq[BlockItem], Value[SType]) => V override def parse(r: SigmaByteReader): Value[SType] = { val itemsSize = r.getUInt().toIntExact - val values = (1 to itemsSize).map(_ => r.getValue().asInstanceOf[BlockItem]) + val values: IndexedSeq[BlockItem] = if (itemsSize == 0) + BlockItem.EmptySeq + else { + // @hotspot: allocate new array only if it is not empty + val buf = new Array[BlockItem](itemsSize) + 
cfor(0)(_ < itemsSize, _ + 1) { i => + buf(i) = r.getValue().asInstanceOf[BlockItem] + } + buf + } val result = r.getValue() cons(values, result) } diff --git a/sigmastate/src/main/scala/sigmastate/serialization/ConcreteCollectionBooleanConstantSerializer.scala b/sigmastate/src/main/scala/sigmastate/serialization/ConcreteCollectionBooleanConstantSerializer.scala index 8dd4b22be3..500056251a 100644 --- a/sigmastate/src/main/scala/sigmastate/serialization/ConcreteCollectionBooleanConstantSerializer.scala +++ b/sigmastate/src/main/scala/sigmastate/serialization/ConcreteCollectionBooleanConstantSerializer.scala @@ -30,10 +30,17 @@ case class ConcreteCollectionBooleanConstantSerializer(cons: (IndexedSeq[Value[S override def parse(r: SigmaByteReader): Value[SCollection[SBoolean.type]] = { val size = r.getUShort() // READ val bits = r.getBits(size) // READ - val items = new Array[BoolValue](size) - cfor(0)(_ < size, _ + 1) { i => - items(i) = BooleanConstant.fromBoolean(bits(i)) + val items: IndexedSeq[Value[SBoolean.type]] = if (size == 0) { + // reusing pre-allocated immutable instances + Value.EmptySeq.asInstanceOf[IndexedSeq[Value[SBoolean.type]]] + } else { + val items = new Array[BoolValue](size) + cfor(0)(_ < size, _ + 1) { i => + items(i) = BooleanConstant.fromBoolean(bits(i)) + } + items } cons(items, SBoolean) } + } diff --git a/sigmastate/src/main/scala/sigmastate/serialization/ConcreteCollectionSerializer.scala b/sigmastate/src/main/scala/sigmastate/serialization/ConcreteCollectionSerializer.scala index d5e41465a1..82c3e24f22 100644 --- a/sigmastate/src/main/scala/sigmastate/serialization/ConcreteCollectionSerializer.scala +++ b/sigmastate/src/main/scala/sigmastate/serialization/ConcreteCollectionSerializer.scala @@ -25,11 +25,18 @@ case class ConcreteCollectionSerializer(cons: (IndexedSeq[Value[SType]], SType) override def parse(r: SigmaByteReader): Value[SCollection[SType]] = { val size = r.getUShort() // READ val tItem = r.getType() // READ - val values = 
new Array[SValue](size) - cfor(0)(_ < size, _ + 1) { i => - values(i) = r.getValue() // READ + val values: IndexedSeq[Value[SType]] = if (size == 0) { + // reusing pre-allocated immutable instances + Value.EmptySeq + } else { + val values = new Array[SValue](size) + cfor(0)(_ < size, _ + 1) { i => + val v = r.getValue() // READ + values(i) = v + assert(v.tpe == tItem, s"Invalid type of collection value in $values") + } + values } - assert(values.forall(_.tpe == tItem), s"Invalid type of collection value in $values") cons(values, tItem) } } diff --git a/sigmastate/src/main/scala/sigmastate/serialization/ConstantStore.scala b/sigmastate/src/main/scala/sigmastate/serialization/ConstantStore.scala index 70e9bbf8a6..0bd4588828 100644 --- a/sigmastate/src/main/scala/sigmastate/serialization/ConstantStore.scala +++ b/sigmastate/src/main/scala/sigmastate/serialization/ConstantStore.scala @@ -6,7 +6,7 @@ import sigmastate.lang.SigmaBuilder import debox.Buffer /** @hotspot used in deserialization (don't beautify this code) */ -class ConstantStore(private val constants: IndexedSeq[Constant[SType]] = Array[Constant[SType]]()) { +class ConstantStore(private val constants: IndexedSeq[Constant[SType]] = Constant.EmptySeq) { private val store: Buffer[Constant[SType]] = Buffer.fromIterable(constants) diff --git a/sigmastate/src/main/scala/sigmastate/serialization/ErgoTreeSerializer.scala b/sigmastate/src/main/scala/sigmastate/serialization/ErgoTreeSerializer.scala index 719cb576cc..279f0866b0 100644 --- a/sigmastate/src/main/scala/sigmastate/serialization/ErgoTreeSerializer.scala +++ b/sigmastate/src/main/scala/sigmastate/serialization/ErgoTreeSerializer.scala @@ -210,22 +210,28 @@ class ErgoTreeSerializer { /** Deserialize constants section only. 
* @hotspot don't beautify this code */ - private def deserializeConstants(header: Byte, r: SigmaByteReader): Array[Constant[SType]] = { - val constants = if (ErgoTree.isConstantSegregation(header)) { - val nConsts = r.getUInt().toInt - val res = new Array[Constant[SType]](nConsts) - cfor(0)(_ < nConsts, _ + 1) { i => - res(i) = constantSerializer.deserialize(r) + private def deserializeConstants(header: Byte, r: SigmaByteReader): IndexedSeq[Constant[SType]] = { + val constants: IndexedSeq[Constant[SType]] = + if (ErgoTree.isConstantSegregation(header)) { + val nConsts = r.getUInt().toInt + if (nConsts > 0) { + // @hotspot: allocate new array only if it is not empty + val res = new Array[Constant[SType]](nConsts) + cfor(0)(_ < nConsts, _ + 1) { i => + res(i) = constantSerializer.deserialize(r) + } + res + } + else + Constant.EmptySeq } - res - } - else - Array.empty[Constant[SType]] + else + Constant.EmptySeq constants } /** Deserialize header and constant sections, but output the rest of the bytes as separate array. 
*/ - def deserializeHeaderWithTreeBytes(r: SigmaByteReader): (Byte, Option[Int], Array[Constant[SType]], Array[Byte]) = { + def deserializeHeaderWithTreeBytes(r: SigmaByteReader): (Byte, Option[Int], IndexedSeq[Constant[SType]], Array[Byte]) = { val (header, sizeOpt) = deserializeHeaderAndSize(r) val constants = deserializeConstants(header, r) val treeBytes = r.getBytes(r.remaining) diff --git a/sigmastate/src/main/scala/sigmastate/serialization/FuncValueSerializer.scala b/sigmastate/src/main/scala/sigmastate/serialization/FuncValueSerializer.scala index a5e41fdbb2..5448963a65 100644 --- a/sigmastate/src/main/scala/sigmastate/serialization/FuncValueSerializer.scala +++ b/sigmastate/src/main/scala/sigmastate/serialization/FuncValueSerializer.scala @@ -6,8 +6,7 @@ import scorex.util.Extensions._ import sigmastate.utils.{SigmaByteReader, SigmaByteWriter} import ValueSerializer._ import sigmastate.utils.SigmaByteWriter.{DataInfo, U, Vlq} - -import scala.collection.mutable +import spire.syntax.all.cfor case class FuncValueSerializer(cons: (IndexedSeq[(Int, SType)], Value[SType]) => Value[SType]) extends ValueSerializer[FuncValue] { @@ -28,14 +27,14 @@ case class FuncValueSerializer(cons: (IndexedSeq[(Int, SType)], Value[SType]) => override def parse(r: SigmaByteReader): Value[SType] = { val argsSize = r.getUInt().toIntExact - val argsBuilder = mutable.ArrayBuilder.make[(Int, SType)]() - for (_ <- 0 until argsSize) { + val args = new Array[(Int, SType)](argsSize) + cfor(0)(_ < argsSize, _ + 1) { i => val id = r.getUInt().toInt val tpe = r.getType() r.valDefTypeStore(id) = tpe - argsBuilder += ((id, tpe)) + args(i) = (id, tpe) } val body = r.getValue() - cons(argsBuilder.result(), body) + cons(args, body) } } diff --git a/sigmastate/src/main/scala/sigmastate/serialization/MethodCallSerializer.scala b/sigmastate/src/main/scala/sigmastate/serialization/MethodCallSerializer.scala index 17dbc964f6..99f4dc29e8 100644 --- 
a/sigmastate/src/main/scala/sigmastate/serialization/MethodCallSerializer.scala +++ b/sigmastate/src/main/scala/sigmastate/serialization/MethodCallSerializer.scala @@ -49,10 +49,17 @@ case class MethodCallSerializer(cons: (Value[SType], SMethod, IndexedSeq[Value[S val complexity = ComplexityTable.MethodCallComplexity.getOrElse((typeId, methodId), ComplexityTable.MinimalComplexity) r.addComplexity(complexity) val nArgs = args.length - val types = new Array[SType](nArgs) - cfor(0)(_ < nArgs, _ + 1) { i => - types(i) = args(i).tpe - } + + val types: Seq[SType] = + if (nArgs == 0) SType.EmptySeq + else { + val types = new Array[SType](nArgs) + cfor(0)(_ < nArgs, _ + 1) { i => + types(i) = args(i).tpe + } + types + } + val specMethod = method.specializeFor(obj.tpe, types) cons(obj, specMethod, args, Map.empty) } diff --git a/sigmastate/src/main/scala/sigmastate/serialization/PropertyCallSerializer.scala b/sigmastate/src/main/scala/sigmastate/serialization/PropertyCallSerializer.scala index f382c203e1..3a2ae84002 100644 --- a/sigmastate/src/main/scala/sigmastate/serialization/PropertyCallSerializer.scala +++ b/sigmastate/src/main/scala/sigmastate/serialization/PropertyCallSerializer.scala @@ -2,6 +2,7 @@ package sigmastate.serialization import sigmastate.Values._ import sigmastate._ +import sigmastate.lang.SigmaTyper import sigmastate.lang.SigmaTyper.STypeSubst import sigmastate.lang.Terms.{PropertyCall, MethodCall} import sigmastate.utils.SigmaByteWriter.DataInfo @@ -26,11 +27,10 @@ case class PropertyCallSerializer(cons: (Value[SType], SMethod, IndexedSeq[Value val typeId = r.getByte() val methodId = r.getByte() val obj = r.getValue() - val args = IndexedSeq() val method = SMethod.fromIds(typeId, methodId) val complexity = ComplexityTable.MethodCallComplexity.getOrElse((typeId, methodId), ComplexityTable.MinimalComplexity) r.addComplexity(complexity) - val specMethod = method.specializeFor(obj.tpe, args) - cons(obj, specMethod, args, Map()) + val specMethod = 
method.specializeFor(obj.tpe, SType.EmptySeq) + cons(obj, specMethod, Value.EmptySeq, SigmaTyper.EmptySubst) } } diff --git a/sigmastate/src/main/scala/sigmastate/serialization/TupleSerializer.scala b/sigmastate/src/main/scala/sigmastate/serialization/TupleSerializer.scala index 0578f4dc66..f579803f62 100644 --- a/sigmastate/src/main/scala/sigmastate/serialization/TupleSerializer.scala +++ b/sigmastate/src/main/scala/sigmastate/serialization/TupleSerializer.scala @@ -5,6 +5,7 @@ import sigmastate.Values._ import sigmastate.utils.{SigmaByteReader, SigmaByteWriter} import ValueSerializer._ import sigmastate.utils.SigmaByteWriter.{DataInfo, U} +import spire.syntax.all.cfor case class TupleSerializer(cons: Seq[Value[SType]] => Value[SType]) extends ValueSerializer[Tuple] { @@ -22,7 +23,10 @@ case class TupleSerializer(cons: Seq[Value[SType]] => Value[SType]) override def parse(r: SigmaByteReader): Value[SType] = { val size = r.getByte() - val values = (1 to size).map(_ => r.getValue()) + val values = new Array[SValue](size) // assume size > 0 so always create a new array + cfor(0)(_ < size, _ + 1) { i => + values(i) = r.getValue() + } cons(values) } diff --git a/sigmastate/src/main/scala/sigmastate/serialization/ValDefSerializer.scala b/sigmastate/src/main/scala/sigmastate/serialization/ValDefSerializer.scala index 8d83ea0b21..e485df7580 100644 --- a/sigmastate/src/main/scala/sigmastate/serialization/ValDefSerializer.scala +++ b/sigmastate/src/main/scala/sigmastate/serialization/ValDefSerializer.scala @@ -39,7 +39,7 @@ case class ValDefSerializer(override val opDesc: ValueCompanion) extends ValueSe } inputs case ValDefCode => - Nil + STypeVar.EmptySeq } val rhs = r.getValue() r.valDefTypeStore(id) = rhs.tpe diff --git a/sigmastate/src/main/scala/sigmastate/trees.scala b/sigmastate/src/main/scala/sigmastate/trees.scala index eb56661cd2..117d0308bd 100644 --- a/sigmastate/src/main/scala/sigmastate/trees.scala +++ b/sigmastate/src/main/scala/sigmastate/trees.scala @@ 
-15,7 +15,7 @@ import sigmastate.utxo.{Transformer, SimpleTransformerCompanion} import scala.collection.mutable import scala.collection.mutable.ArrayBuffer - +import spire.syntax.all.cfor /** * Basic trait for inner nodes of crypto-trees, so AND/OR/THRESHOLD sigma-protocol connectives @@ -41,10 +41,19 @@ case class CAND(override val children: Seq[SigmaBoolean]) extends SigmaConjectur object CAND { import TrivialProp._ + + /** Connects the given sigma propositions into CAND proposition performing + * partial evaluation when some of them are trivial propositions. + * + * @param items propositions to combine into CAND + * @return CAND, TrueProp, FalseProp or even one of the items depending on partial evaluation + */ def normalized(items: Seq[SigmaBoolean]): SigmaBoolean = { require(items.nonEmpty) val res = new ArrayBuffer[SigmaBoolean]() - for (x <- items) { + val nItems = items.length + cfor(0)(_ < nItems, _ + 1) { i => + val x = items(i) x match { case FalseProp => return FalseProp case TrueProp => // skip @@ -67,10 +76,19 @@ case class COR(children: Seq[SigmaBoolean]) extends SigmaConjecture { object COR { import TrivialProp._ + + /** Connects the given sigma propositions into COR proposition performing + * partial evaluation when some of them are trivial propositions. 
+ * + * @param items propositions to combine into COR + * @return COR, TrueProp, FalseProp or even one of the items depending on partial evaluation + */ def normalized(items: Seq[SigmaBoolean]): SigmaBoolean = { require(items.nonEmpty) val res = new ArrayBuffer[SigmaBoolean]() - for (x <- items) { + val nItems = items.length + cfor(0)(_ < nItems, _ + 1) { i => + val x = items(i) x match { case FalseProp => // skip case TrueProp => return TrueProp @@ -153,11 +171,11 @@ case class CreateAvlTree(operationFlags: ByteValue, valueLengthOpt: Value[SIntOption]) extends AvlTreeValue { override def companion = CreateAvlTree override def tpe = SAvlTree - override def opType = CreateAvlTree.opType + override def opType = CreateAvlTree.OpType } object CreateAvlTree extends ValueCompanion { override def opCode: OpCode = OpCodes.AvlTreeCode - val opType = SFunc(IndexedSeq(SByte, SByteArray, SInt, SIntOption), SAvlTree) + val OpType = SFunc(Array(SByte, SByteArray, SInt, SIntOption), SAvlTree) } /** ErgoTree operation to create a new SigmaProp value representing public key diff --git a/sigmastate/src/main/scala/sigmastate/types.scala b/sigmastate/src/main/scala/sigmastate/types.scala index 2cec907904..1bc2017caf 100644 --- a/sigmastate/src/main/scala/sigmastate/types.scala +++ b/sigmastate/src/main/scala/sigmastate/types.scala @@ -152,6 +152,12 @@ object SType { val IndexedSeqOfT1: IndexedSeq[SType] = Array(SType.tT) val IndexedSeqOfT2: IndexedSeq[SType] = Array(SType.tT, SType.tT) + /** Immutable empty array, can be used to avoid repeated allocations. */ + val EmptyArray = Array.empty[SType] + + /** Immutable empty IndexedSeq, can be used to avoid repeated allocations. */ + val EmptySeq: IndexedSeq[SType] = EmptyArray + /** All pre-defined types should be listed here. Note, NoType is not listed. * Should be in sync with sigmastate.lang.Types.predefTypes. 
*/ val allPredefTypes = Seq(SBoolean, SByte, SShort, SInt, SLong, SBigInt, SContext, SGlobal, SHeader, SPreHeader, SAvlTree, SGroupElement, SSigmaProp, SString, SBox, SUnit, SAny) @@ -1336,6 +1342,12 @@ case class STypeVar(name: String) extends SType { object STypeVar { val TypeCode: TypeCode = 103: Byte implicit def liftString(n: String): STypeVar = STypeVar(n) + + /** Immutable empty array, can be used to avoid repeated allocations. */ + val EmptyArray = Array.empty[STypeVar] + + /** Immutable empty IndexedSeq, can be used to avoid repeated allocations. */ + val EmptySeq: IndexedSeq[STypeVar] = EmptyArray } case object SBox extends SProduct with SPredefType with SMonoType { diff --git a/sigmastate/src/main/scala/sigmastate/utils/SigmaByteReader.scala b/sigmastate/src/main/scala/sigmastate/utils/SigmaByteReader.scala index 1d679c3b53..cf51dd8b5d 100644 --- a/sigmastate/src/main/scala/sigmastate/utils/SigmaByteReader.scala +++ b/sigmastate/src/main/scala/sigmastate/utils/SigmaByteReader.scala @@ -2,10 +2,11 @@ package sigmastate.utils import scorex.util.serialization.Reader import sigmastate.SType -import sigmastate.Values.SValue -import sigmastate.lang.exceptions.{DeserializeCallDepthExceeded, InputSizeLimitExceeded} +import sigmastate.Values.{SValue, Value} +import sigmastate.lang.exceptions.{InputSizeLimitExceeded, DeserializeCallDepthExceeded} import sigmastate.serialization._ import scorex.util.Extensions._ +import spire.syntax.all.cfor class SigmaByteReader(val r: Reader, var constantStore: ConstantStore, @@ -117,13 +118,21 @@ class SigmaByteReader(val r: Reader, lvl = v } + /** Read sequence of values from this reader. + * It first reads the number of values and then reads each value using `getValue` method. 
+ * + * @return a sequence of zero of more values read + */ @inline def getValues(): IndexedSeq[SValue] = { val size = getUInt().toIntExact - val xs = new Array[SValue](size) - for (i <- 0 until size) { - xs(i) = getValue() + if (size == 0) Value.EmptySeq // quick short-cut when there is nothing to read + else { + val xs = new Array[SValue](size) + cfor(0)(_ < size, _ + 1) { i => + xs(i) = getValue() + } + xs } - xs } private var positionLmt: Int = r.position + r.remaining diff --git a/sigmastate/src/main/scala/sigmastate/utxo/transformers.scala b/sigmastate/src/main/scala/sigmastate/utxo/transformers.scala index bed8d10335..2c79e001e6 100644 --- a/sigmastate/src/main/scala/sigmastate/utxo/transformers.scala +++ b/sigmastate/src/main/scala/sigmastate/utxo/transformers.scala @@ -231,10 +231,14 @@ case class ExtractRegisterAs[V <: SType]( input: Value[SBox.type], override val tpe: SOption[V]) extends Extract[SOption[V]] with NotReadyValue[SOption[V]] { override def companion = ExtractRegisterAs - override val opType = SFunc(Array(SBox, SByte), tpe) + override val opType = SFunc(ExtractRegisterAs.BoxAndByte, tpe) } object ExtractRegisterAs extends ValueCompanion { override def opCode: OpCode = OpCodes.ExtractRegisterAs + + //@hotspot: avoids thousands of allocations per second + private val BoxAndByte: IndexedSeq[SType] = Array(SBox, SByte) + def apply[V <: SType](input: Value[SBox.type], registerId: RegisterId)(implicit tpe: V): ExtractRegisterAs[V] = ExtractRegisterAs(input, registerId, SOption(tpe)) diff --git a/sigmastate/src/test/scala/org/ergoplatform/dsl/TestContractSpec.scala b/sigmastate/src/test/scala/org/ergoplatform/dsl/TestContractSpec.scala index 1467acf92c..b63e9dbf3d 100644 --- a/sigmastate/src/test/scala/org/ergoplatform/dsl/TestContractSpec.scala +++ b/sigmastate/src/test/scala/org/ergoplatform/dsl/TestContractSpec.scala @@ -88,7 +88,7 @@ case class TestContractSpec(testSuite: SigmaTestingCommons)(implicit val IR: IRC ctx } def runDsl(extensions: 
Map[Byte, AnyValue] = Map()): SigmaProp = { - val ctx = toErgoContext.toSigmaContext(IR, false, extensions) + val ctx = toErgoContext.toSigmaContext(false, extensions) val res = utxoBox.propSpec.dslSpec(ctx) res } diff --git a/sigmastate/src/test/scala/sigmastate/eval/ErgoScriptTestkit.scala b/sigmastate/src/test/scala/sigmastate/eval/ErgoScriptTestkit.scala index 2abaf1c128..593806f3f3 100644 --- a/sigmastate/src/test/scala/sigmastate/eval/ErgoScriptTestkit.scala +++ b/sigmastate/src/test/scala/sigmastate/eval/ErgoScriptTestkit.scala @@ -173,14 +173,14 @@ trait ErgoScriptTestkit extends ContractsTestkit with LangTests } if (ergoCtx.isDefined) { - val calcCtx = ergoCtx.get.toSigmaContext(IR, isCost = false) + val calcCtx = ergoCtx.get.toSigmaContext(isCost = false) val testContractRes = testContract.map(_(calcCtx)) testContractRes.foreach { res => checkExpected(res, expectedResult.calc, "Test Contract actual: %s, expected: %s") } // check cost - val costCtx = ergoCtx.get.toSigmaContext(IR, isCost = true) + val costCtx = ergoCtx.get.toSigmaContext(isCost = true) val estimatedCost = IR.checkCost(costCtx, tree, costF, CostTable.ScriptLimit) // check size diff --git a/sigmastate/src/test/scala/sigmastate/helpers/SigmaTestingCommons.scala b/sigmastate/src/test/scala/sigmastate/helpers/SigmaTestingCommons.scala index e41d3cedad..8826339ad4 100644 --- a/sigmastate/src/test/scala/sigmastate/helpers/SigmaTestingCommons.scala +++ b/sigmastate/src/test/scala/sigmastate/helpers/SigmaTestingCommons.scala @@ -205,7 +205,7 @@ trait SigmaTestingCommons extends PropSpec val ergoCtx = ErgoLikeContextTesting.dummy(createBox(0, TrueProp)) .withBindings(1.toByte -> Constant[SType](x.asInstanceOf[SType#WrappedType], tpeA)) .withBindings(bindings: _*) - val calcCtx = ergoCtx.toSigmaContext(IR, isCost = false).asInstanceOf[CostingDataContext] + val calcCtx = ergoCtx.toSigmaContext(isCost = false).asInstanceOf[CostingDataContext] val costCtx = calcCtx.copy(isCost = true) (costCtx, calcCtx) 
} diff --git a/sigmastate/src/test/scala/sigmastate/lang/SigmaCompilerTest.scala b/sigmastate/src/test/scala/sigmastate/lang/SigmaCompilerTest.scala index a2f005ba0f..786c31eeef 100644 --- a/sigmastate/src/test/scala/sigmastate/lang/SigmaCompilerTest.scala +++ b/sigmastate/src/test/scala/sigmastate/lang/SigmaCompilerTest.scala @@ -87,8 +87,8 @@ class SigmaCompilerTest extends SigmaTestingCommons with LangTests with ObjectGe } property("global methods") { - comp(env, "{ groupGenerator }") shouldBe MethodCall(Global, SGlobal.groupGeneratorMethod, IndexedSeq(), SigmaTyper.emptySubst) - comp(env, "{ Global.groupGenerator }") shouldBe MethodCall(Global, SGlobal.groupGeneratorMethod, IndexedSeq(), SigmaTyper.emptySubst) + comp(env, "{ groupGenerator }") shouldBe MethodCall(Global, SGlobal.groupGeneratorMethod, IndexedSeq(), SigmaTyper.EmptySubst) + comp(env, "{ Global.groupGenerator }") shouldBe MethodCall(Global, SGlobal.groupGeneratorMethod, IndexedSeq(), SigmaTyper.EmptySubst) comp(env, "{ Global.xor(arr1, arr2) }") shouldBe Xor(ByteArrayConstant(arr1), ByteArrayConstant(arr2)) comp(env, "{ xor(arr1, arr2) }") shouldBe Xor(ByteArrayConstant(arr1), ByteArrayConstant(arr2)) } diff --git a/sigmastate/src/test/scala/sigmastate/lang/SigmaTyperTest.scala b/sigmastate/src/test/scala/sigmastate/lang/SigmaTyperTest.scala index 00286d5f25..4926824bb1 100644 --- a/sigmastate/src/test/scala/sigmastate/lang/SigmaTyperTest.scala +++ b/sigmastate/src/test/scala/sigmastate/lang/SigmaTyperTest.scala @@ -312,12 +312,12 @@ class SigmaTyperTest extends PropSpec with PropertyChecks with Matchers with Lan property("compute unifying type substitution: prim types") { import SigmaTyper._ forAll { t: SPredefType => - unifyTypes(t, t) shouldBe Some(emptySubst) - unifyTypes(SAny, t) shouldBe Some(emptySubst) - unifyTypes(SAny, SCollection(t)) shouldBe Some(emptySubst) - unifyTypes(SCollection(SAny), SCollection(t)) shouldBe Some(emptySubst) - unifyTypes(SCollection(SAny), STuple(t, t, t)) 
shouldBe Some(emptySubst) - unifyTypes(SCollection(SAny), STuple(t, STuple(t, t))) shouldBe Some(emptySubst) + unifyTypes(t, t) shouldBe Some(EmptySubst) + unifyTypes(SAny, t) shouldBe Some(EmptySubst) + unifyTypes(SAny, SCollection(t)) shouldBe Some(EmptySubst) + unifyTypes(SCollection(SAny), SCollection(t)) shouldBe Some(EmptySubst) + unifyTypes(SCollection(SAny), STuple(t, t, t)) shouldBe Some(EmptySubst) + unifyTypes(SCollection(SAny), STuple(t, STuple(t, t))) shouldBe Some(EmptySubst) } } @@ -327,11 +327,11 @@ class SigmaTyperTest extends PropSpec with PropertyChecks with Matchers with Lan unifyTypes(t1, t2) shouldBe exp exp match { case Some(subst) => - unifyTypes(applySubst(t1, subst), t2) shouldBe Some(emptySubst) + unifyTypes(applySubst(t1, subst), t2) shouldBe Some(EmptySubst) case None => } } - def check(s1: String, s2: String, exp: Option[STypeSubst] = Some(emptySubst)): Unit = { + def check(s1: String, s2: String, exp: Option[STypeSubst] = Some(EmptySubst)): Unit = { val t1 = ty(s1); val t2 = ty(s2) checkTypes(t1, t2, exp) } @@ -408,7 +408,7 @@ class SigmaTyperTest extends PropSpec with PropertyChecks with Matchers with Lan "((Int,Int), Coll[Boolean] => Coll[(Coll[C], Boolean)]) => Int", ("A", SInt), ("B", SBoolean)) - unifyTypes(SBoolean, SSigmaProp) shouldBe Some(emptySubst) + unifyTypes(SBoolean, SSigmaProp) shouldBe Some(EmptySubst) unifyTypes(SSigmaProp, SBoolean) shouldBe None check("(Int, Boolean)", "(Int, SigmaProp)") check("(Int, Boolean, Boolean)", "(Int, SigmaProp, SigmaProp)") diff --git a/sigmastate/src/test/scala/sigmastate/utxo/benchmarks/CrowdFundingContract.scala b/sigmastate/src/test/scala/sigmastate/utxo/benchmarks/CrowdFundingContract.scala deleted file mode 100644 index 1ef6cc5102..0000000000 --- a/sigmastate/src/test/scala/sigmastate/utxo/benchmarks/CrowdFundingContract.scala +++ /dev/null @@ -1,26 +0,0 @@ -package sigmastate.utxo.benchmarks - -import org.ergoplatform.ErgoLikeContext -import 
sigmastate.helpers.{ContextEnrichingTestProvingInterpreter, ErgoLikeTestInterpreter} -import sigmastate.interpreter.Interpreter -import sigmastate.utxo.SigmaContract - -import scala.util.Try - -abstract class CrowdFundingContract( - val timeout: Int, - val minToRaise: Long, - val backerProver: ContextEnrichingTestProvingInterpreter, - val projectProver: ContextEnrichingTestProvingInterpreter -) extends SigmaContract { - //a blockchain node verifying a block containing a spending transaction - val verifier = new ErgoLikeTestInterpreter()(backerProver.IR) - val backerPubKey = backerProver.dlogSecrets.head.publicImage - val projectPubKey = projectProver.dlogSecrets.head.publicImage - - def prove(ctx: ErgoLikeContext, fakeMessage: Array[Byte]): Array[Byte] - - def verify(proof: Array[Byte], - ctx: ErgoLikeContext, - fakeMessage: Array[Byte]): Try[Interpreter.VerificationResult] -} diff --git a/sigmastate/src/test/scala/sigmastate/utxo/benchmarks/CrowdFundingKernelContract.scala b/sigmastate/src/test/scala/sigmastate/utxo/benchmarks/CrowdFundingKernelContract.scala deleted file mode 100644 index 8cd0722134..0000000000 --- a/sigmastate/src/test/scala/sigmastate/utxo/benchmarks/CrowdFundingKernelContract.scala +++ /dev/null @@ -1,86 +0,0 @@ -package sigmastate.utxo.benchmarks - -import java.math.BigInteger -import java.util - -import org.ergoplatform.ErgoLikeContext -import sigmastate.basics.DLogProtocol.{DLogInteractiveProver, DLogProverInput, FirstDLogProverMessage, ProveDlog} -import sigmastate.basics.VerifierMessage.Challenge -import scorex.crypto.hash.Blake2b256 -import sigmastate._ -import sigmastate.helpers.ContextEnrichingTestProvingInterpreter -import sigmastate.interpreter.{CryptoConstants, Interpreter} -import sigmastate.utils.Helpers - -import scala.util.Try - -class CrowdFundingKernelContract( - timeout: Int, - minToRaise: Long, - override val backerProver: ContextEnrichingTestProvingInterpreter, - override val projectProver: 
ContextEnrichingTestProvingInterpreter -) extends CrowdFundingContract(timeout, minToRaise, backerProver, projectProver) { - - def isProven(pubKey: ProveDlog, message: Array[Byte]): projectProver.ProofT = { - import projectProver._ - var su = UnprovenSchnorr(pubKey, None, None, None, simulated = false) - val secret = secrets.find { - case in: DLogProverInput => in.publicImage == pubKey - case _ => false - } - val secretKnown = secret.isDefined - val simulated = !secretKnown - val step4: UnprovenTree = if (simulated) { - assert(su.challengeOpt.isDefined) - DLogInteractiveProver.simulate(su.proposition,su.challengeOpt.get).asInstanceOf[UnprovenTree] - } else { - val (r, commitment) = DLogInteractiveProver.firstMessage() - UnprovenSchnorr(pubKey, Some(commitment), Some(r), None, simulated = false) - } - - val commitments = step4 match { - case ul: UnprovenLeaf => ul.commitmentOpt.toSeq - case _ => ??? - /*case uc: UnprovenConjecture => uc.childrenCommitments*/ // can't do this anymore because internal nodes no longer have commitments - } - - val rootChallenge = Challenge @@ Blake2b256(Helpers.concatBytes(commitments.map(_.bytes) :+ message)) - - su = step4.asInstanceOf[UnprovenSchnorr] - val privKey = secret.get.asInstanceOf[DLogProverInput] - val z = DLogInteractiveProver.secondMessage(privKey, su.randomnessOpt.get, rootChallenge) - UncheckedSchnorr(su.proposition, None, rootChallenge, z) - } - - def prove(ctx: ErgoLikeContext, message: Array[Byte]): Array[Byte] = { - val c1 = ctx.preHeader.height >= timeout //&& isProven(backerPubKey, fakeMessage) - val c2 = Array( - ctx.preHeader.height < timeout, - ctx.spendingTransaction.outputs.exists(out => { - out.value >= minToRaise && - util.Arrays.equals(out.propositionBytes, projectPubKey.toSigmaProp.treeWithSegregation.bytes) - }) - ).forall(identity) - var proof: projectProver.ProofT = null - c1 || (c2 && { proof = isProven(projectPubKey, message); true}) - SigSerializer.toBytes(proof) - } - - def verify(proof: 
Array[Byte], - ctx: ErgoLikeContext, - message: Array[Byte]): Try[Interpreter.VerificationResult] = Try { - val sn = proof.asInstanceOf[UncheckedSchnorr] - val dlog = CryptoConstants.dlogGroup - val g = dlog.generator - val h = sn.proposition.h - - val a = dlog.multiplyGroupElements( - dlog.exponentiate(g, sn.secondMessage.z.underlying()), - dlog.inverseOf(dlog.exponentiate(h, new BigInteger(1, sn.challenge)))) - - val rootCommitment = FirstDLogProverMessage(a) - - val expectedChallenge = Blake2b256(Helpers.concatBytes(Seq(rootCommitment.bytes, message))) - util.Arrays.equals(sn.challenge, expectedChallenge) -> 0 - } -} diff --git a/sigmastate/src/test/scala/sigmastate/utxo/benchmarks/CrowdFundingScriptContract.scala b/sigmastate/src/test/scala/sigmastate/utxo/benchmarks/CrowdFundingScriptContract.scala deleted file mode 100644 index 24057cca7f..0000000000 --- a/sigmastate/src/test/scala/sigmastate/utxo/benchmarks/CrowdFundingScriptContract.scala +++ /dev/null @@ -1,54 +0,0 @@ -package sigmastate.utxo.benchmarks - -import org.ergoplatform.ErgoLikeContext -import sigmastate.SBoolean -import sigmastate.Values.{Value, SigmaPropValue} -import sigmastate.helpers.ContextEnrichingTestProvingInterpreter -import sigmastate.interpreter.Interpreter -import sigmastate.interpreter.Interpreter._ -import sigmastate.lang.Terms._ - -import scala.util.Try - -class CrowdFundingScriptContract( - timeout: Int, - minToRaise: Long, - override val backerProver: ContextEnrichingTestProvingInterpreter, - override val projectProver: ContextEnrichingTestProvingInterpreter -) extends CrowdFundingContract(timeout, minToRaise, backerProver, projectProver) { - - val compiledProposition: SigmaPropValue = { - val env = Map( - "timeout" -> timeout, - "minToRaise" -> minToRaise, - "backerPubKey" -> backerPubKey, - "projectPubKey" -> projectPubKey - ) - val compiledScript = compiler.compileWithoutCosting(env, - """{ - | val c1 = HEIGHT >= timeout && backerPubKey - | val c2 = allOf(Coll( - | HEIGHT < 
timeout, - | projectPubKey, - | OUTPUTS.exists({ (out: Box) => - | out.value >= minToRaise && out.propositionBytes == projectPubKey.propBytes - | }) - | )) - | c1 || c2 - | } - """.stripMargin).asSigmaProp - compiledScript - } - - def prove(ctx: ErgoLikeContext, fakeMessage: Array[Byte]): Array[Byte] = { - val proofP = projectProver.prove(compiledProposition, ctx, fakeMessage).get.proof - proofP - } - - def verify(proof: Array[Byte], - ctx: ErgoLikeContext, - fakeMessage: Array[Byte]): Try[Interpreter.VerificationResult] = { - val res = verifier.verify(emptyEnv, compiledProposition, ctx, proof, fakeMessage) - res - } -} diff --git a/sigmastate/src/test/scala/sigmastate/utxo/benchmarks/CrowdfundingBenchmark.scala b/sigmastate/src/test/scala/sigmastate/utxo/benchmarks/CrowdfundingBenchmark.scala deleted file mode 100644 index 26f1948312..0000000000 --- a/sigmastate/src/test/scala/sigmastate/utxo/benchmarks/CrowdfundingBenchmark.scala +++ /dev/null @@ -1,82 +0,0 @@ -package sigmastate.utxo.benchmarks - - -import org.ergoplatform.{ErgoLikeContext, ErgoScriptPredef} -import sigmastate.Values._ -import sigmastate._ -import sigmastate.helpers.{ContextEnrichingTestProvingInterpreter, ErgoLikeContextTesting, SigmaTestingCommons} -import sigmastate.helpers.TestingHelpers._ -import scalan.util.BenchmarkUtil._ - -class CrowdfundingBenchmark extends SigmaTestingCommons { - implicit lazy val IR = new TestingIRContext - def createTestContext(contract: CrowdFundingContract): ErgoLikeContext = { - val outputToSpend = testBox(10, ErgoScriptPredef.TrueProp, 0) - //First case: height < timeout, project is able to claim amount of tokens not less than required threshold - val tx1Output1 = testBox(contract.minToRaise, contract.projectPubKey, 0) - val tx1Output2 = testBox(1, contract.projectPubKey, 0) - //normally this transaction would invalid, but we're not checking it in this test - val tx = createTransaction(IndexedSeq(tx1Output1, tx1Output2)) - val ctx = ErgoLikeContextTesting( - 
currentHeight = contract.timeout - 1, // HEIGHT < timeout, - lastBlockUtxoRoot = AvlTreeData.dummy, - minerPubkey = ErgoLikeContextTesting.dummyPubkey, - boxesToSpend = IndexedSeq(), - spendingTransaction = tx, - self = outputToSpend) - ctx - } - - val timeout = 100 - val minToRaise = 1000L - val nIters = 10000 - val nTasks = 1 - - ignore("Evaluation by Precompiled Kernel(!!! ignore)") { - runTasks(nTasks) { iTask => - //backer's prover with his private key - val backerProver = new ContextEnrichingTestProvingInterpreter - //project's prover with his private key - val projectProver = new ContextEnrichingTestProvingInterpreter - val contract = new CrowdFundingKernelContract(timeout, minToRaise, backerProver, projectProver) - val ctx = createTestContext(contract) - - val (ok, time) = measureTime { - var res = true - for (_ <- 1 to nIters) { - val proof = contract.prove(ctx, fakeMessage) - res = contract.verify(proof, ctx, fakeMessage).get._1 - res shouldBe true - } - res - } - ok shouldBe true - println(s"Task $iTask: Thread ${Thread.currentThread().getId}: Completed $nIters iterations in $time msec") - } - } - - ignore("Evaluation by Script Interpretation(!!! 
ignore)") { - runTasks(nTasks) { iTask => - //backer's prover with his private key - val backerProver = new ContextEnrichingTestProvingInterpreter - //project's prover with his private key - val projectProver = new ContextEnrichingTestProvingInterpreter - val contract = new CrowdFundingScriptContract(timeout, minToRaise, backerProver, projectProver) - val ctx = createTestContext(contract) - - val (ok, time) = measureTime { - var res = true - for (_ <- 1 to nIters) { - val proof = contract.prove(ctx, fakeMessage) - res = contract.verify(proof, ctx, fakeMessage).get._1 - res shouldBe true - } - res - } - ok shouldBe true - println(s"Task $iTask: Thread ${Thread.currentThread().getId}: Completed $nIters iterations in $time msec") - } - } - - -} diff --git a/sigmastate/src/test/scala/sigmastate/utxo/examples/LetsSpecification.scala b/sigmastate/src/test/scala/sigmastate/utxo/examples/LetsSpecification.scala index c3af822427..97746d4381 100644 --- a/sigmastate/src/test/scala/sigmastate/utxo/examples/LetsSpecification.scala +++ b/sigmastate/src/test/scala/sigmastate/utxo/examples/LetsSpecification.scala @@ -166,8 +166,7 @@ import scala.util.Random some day this article will be continued! 
*/ -class LetsSpecification extends SigmaTestingCommons { - suite => +class LetsSpecification extends SigmaTestingCommons { suite => // Not mixed with TestContext since it is not possible to call compiler.compile outside tests if mixed implicit lazy val IR: IRContext = new TestingIRContext diff --git a/sigmastate/src/test/scala/special/sigma/SigmaDslSpecification.scala b/sigmastate/src/test/scala/special/sigma/SigmaDslSpecification.scala index 86b996700f..71e16fb564 100644 --- a/sigmastate/src/test/scala/special/sigma/SigmaDslSpecification.scala +++ b/sigmastate/src/test/scala/special/sigma/SigmaDslSpecification.scala @@ -2341,7 +2341,7 @@ class SigmaDslSpecification extends SigmaDslTesting { suite => // doApply((CFunc[Int, Int](ctx, code), 10)) // } - lazy val ctx = ergoCtx.toSigmaContext(IR, false) + lazy val ctx = ergoCtx.toSigmaContext(false) property("Box properties equivalence") { val b1 = CostingBox( diff --git a/sigmastate/src/test/scala/special/sigma/SigmaDslTesting.scala b/sigmastate/src/test/scala/special/sigma/SigmaDslTesting.scala index 2d27ab1523..ce066c9f26 100644 --- a/sigmastate/src/test/scala/special/sigma/SigmaDslTesting.scala +++ b/sigmastate/src/test/scala/special/sigma/SigmaDslTesting.scala @@ -305,7 +305,7 @@ class SigmaDslTesting extends PropSpec // Add additional oparations which are not yet implemented in ErgoScript compiler val multisig = sigmastate.AtLeast( IntConstant(2), - Seq( + Array( pkAlice, DeserializeRegister(ErgoBox.R5, SSigmaProp), // deserialize pkBob DeserializeContext(2, SSigmaProp))) // deserialize pkCarol @@ -430,7 +430,7 @@ class SigmaDslTesting extends PropSpec FeatureTest(AddedFeature, script, scalaFunc, Option(expectedExpr), oldImpl, newImpl) } - val contextGen: Gen[Context] = ergoLikeContextGen.map(c => c.toSigmaContext(createIR(), isCost = false)) + val contextGen: Gen[Context] = ergoLikeContextGen.map(c => c.toSigmaContext(isCost = false)) implicit val arbContext: Arbitrary[Context] = Arbitrary(contextGen) /** NOTE, 
this should be `def` to allow overriding of generatorDrivenConfig in derived Spec classes. */ @@ -467,7 +467,7 @@ class SigmaDslTesting extends PropSpec printTestCases: Boolean = PrintTestCasesDefault, failOnTestVectors: Boolean = FailOnTestVectorsDefault, preGeneratedSamples: Option[Seq[A]] = None): Unit = { - + System.gc() // force GC to avoid occasional OOM exception val table = Table(("x", "y"), cases:_*) forAll(table) { (x: A, expectedRes: Try[B]) => val res = f.checkEquality(x, printTestCases).map(_._1)