From 5c24fd99618cc124dcb95bae76987514a0babbb8 Mon Sep 17 00:00:00 2001 From: Alexander Slesarenko Date: Mon, 28 Sep 2020 13:03:25 +0300 Subject: [PATCH 01/19] memory bound IRContext: remove unnecessary IR: Evaluation parameter --- .../src/main/scala/org/ergoplatform/ErgoLikeContext.scala | 7 +++---- .../main/scala/sigmastate/interpreter/Interpreter.scala | 6 +++--- .../scala/sigmastate/interpreter/InterpreterContext.scala | 2 +- .../test/scala/org/ergoplatform/dsl/TestContractSpec.scala | 2 +- .../src/test/scala/sigmastate/eval/ErgoScriptTestkit.scala | 4 ++-- .../scala/sigmastate/helpers/SigmaTestingCommons.scala | 2 +- .../test/scala/special/sigma/SigmaDslSpecification.scala | 2 +- .../src/test/scala/special/sigma/SigmaDslTesting.scala | 2 +- 8 files changed, 13 insertions(+), 14 deletions(-) diff --git a/sigmastate/src/main/scala/org/ergoplatform/ErgoLikeContext.scala b/sigmastate/src/main/scala/org/ergoplatform/ErgoLikeContext.scala index 438978fd34..6f5544e847 100644 --- a/sigmastate/src/main/scala/org/ergoplatform/ErgoLikeContext.scala +++ b/sigmastate/src/main/scala/org/ergoplatform/ErgoLikeContext.scala @@ -118,17 +118,16 @@ class ErgoLikeContext(val lastBlockUtxoRoot: AvlTreeData, dataBoxes, boxesToSpend, newSpendingTransaction, selfIndex, extension, validationSettings, costLimit, initCost) - override def toSigmaContext(IR: Evaluation, isCost: Boolean, extensions: Map[Byte, AnyValue] = Map()): sigma.Context = { - implicit val IRForBox: Evaluation = IR + override def toSigmaContext(isCost: Boolean, extensions: Map[Byte, AnyValue] = Map()): sigma.Context = { import Evaluation._ - def contextVars(m: Map[Byte, AnyValue])(implicit IR: Evaluation): Coll[AnyValue] = { + def contextVars(m: Map[Byte, AnyValue]): Coll[AnyValue] = { val maxKey = if (m.keys.isEmpty) 0 else m.keys.max val res = new Array[AnyValue](maxKey + 1) for ((id, v) <- m) { res(id) = v } - IR.sigmaDslBuilderValue.Colls.fromArray(res) + CostingSigmaDslBuilder.Colls.fromArray(res) } val dataInputs 
= this.dataBoxes.toArray.map(_.toTestBox(isCost)).toColl diff --git a/sigmastate/src/main/scala/sigmastate/interpreter/Interpreter.scala b/sigmastate/src/main/scala/sigmastate/interpreter/Interpreter.scala index 1c78a7f82f..61a5378fa4 100644 --- a/sigmastate/src/main/scala/sigmastate/interpreter/Interpreter.scala +++ b/sigmastate/src/main/scala/sigmastate/interpreter/Interpreter.scala @@ -115,7 +115,7 @@ trait Interpreter extends ScorexLogging { (res, currContext.value) } - def calcResult(context: special.sigma.Context, calcF: Ref[IR.Context => Any]): special.sigma.SigmaProp = { + private def calcResult(context: special.sigma.Context, calcF: Ref[IR.Context => Any]): special.sigma.SigmaProp = { import IR._ import Context._ import SigmaProp._ @@ -156,7 +156,7 @@ trait Interpreter extends ScorexLogging { CheckCostFunc(IR)(asRep[Any => Int](costF)) - val costingCtx = context.toSigmaContext(IR, isCost = true) + val costingCtx = context.toSigmaContext(isCost = true) val estimatedCost = IR.checkCostWithContext(costingCtx, costF, maxCost, initCost).getOrThrow IR.onEstimatedCost(env, exp, costingRes, costingCtx, estimatedCost) @@ -164,7 +164,7 @@ trait Interpreter extends ScorexLogging { // check calc val calcF = costingRes.calcF CheckCalcFunc(IR)(calcF) - val calcCtx = context.toSigmaContext(IR, isCost = false) + val calcCtx = context.toSigmaContext(isCost = false) val res = calcResult(calcCtx, calcF) SigmaDsl.toSigmaBoolean(res) -> estimatedCost } diff --git a/sigmastate/src/main/scala/sigmastate/interpreter/InterpreterContext.scala b/sigmastate/src/main/scala/sigmastate/interpreter/InterpreterContext.scala index ea2470c0e7..ebe83519b0 100644 --- a/sigmastate/src/main/scala/sigmastate/interpreter/InterpreterContext.scala +++ b/sigmastate/src/main/scala/sigmastate/interpreter/InterpreterContext.scala @@ -65,5 +65,5 @@ trait InterpreterContext { def withValidationSettings(newVs: SigmaValidationSettings): InterpreterContext /** Creates `special.sigma.Context` instance based 
on this context. */ - def toSigmaContext(IR: Evaluation, isCost: Boolean, extensions: Map[Byte, AnyValue] = Map()): sigma.Context + def toSigmaContext(isCost: Boolean, extensions: Map[Byte, AnyValue] = Map()): sigma.Context } diff --git a/sigmastate/src/test/scala/org/ergoplatform/dsl/TestContractSpec.scala b/sigmastate/src/test/scala/org/ergoplatform/dsl/TestContractSpec.scala index 1467acf92c..b63e9dbf3d 100644 --- a/sigmastate/src/test/scala/org/ergoplatform/dsl/TestContractSpec.scala +++ b/sigmastate/src/test/scala/org/ergoplatform/dsl/TestContractSpec.scala @@ -88,7 +88,7 @@ case class TestContractSpec(testSuite: SigmaTestingCommons)(implicit val IR: IRC ctx } def runDsl(extensions: Map[Byte, AnyValue] = Map()): SigmaProp = { - val ctx = toErgoContext.toSigmaContext(IR, false, extensions) + val ctx = toErgoContext.toSigmaContext(false, extensions) val res = utxoBox.propSpec.dslSpec(ctx) res } diff --git a/sigmastate/src/test/scala/sigmastate/eval/ErgoScriptTestkit.scala b/sigmastate/src/test/scala/sigmastate/eval/ErgoScriptTestkit.scala index 2abaf1c128..593806f3f3 100644 --- a/sigmastate/src/test/scala/sigmastate/eval/ErgoScriptTestkit.scala +++ b/sigmastate/src/test/scala/sigmastate/eval/ErgoScriptTestkit.scala @@ -173,14 +173,14 @@ trait ErgoScriptTestkit extends ContractsTestkit with LangTests } if (ergoCtx.isDefined) { - val calcCtx = ergoCtx.get.toSigmaContext(IR, isCost = false) + val calcCtx = ergoCtx.get.toSigmaContext(isCost = false) val testContractRes = testContract.map(_(calcCtx)) testContractRes.foreach { res => checkExpected(res, expectedResult.calc, "Test Contract actual: %s, expected: %s") } // check cost - val costCtx = ergoCtx.get.toSigmaContext(IR, isCost = true) + val costCtx = ergoCtx.get.toSigmaContext(isCost = true) val estimatedCost = IR.checkCost(costCtx, tree, costF, CostTable.ScriptLimit) // check size diff --git a/sigmastate/src/test/scala/sigmastate/helpers/SigmaTestingCommons.scala 
b/sigmastate/src/test/scala/sigmastate/helpers/SigmaTestingCommons.scala index e41d3cedad..8826339ad4 100644 --- a/sigmastate/src/test/scala/sigmastate/helpers/SigmaTestingCommons.scala +++ b/sigmastate/src/test/scala/sigmastate/helpers/SigmaTestingCommons.scala @@ -205,7 +205,7 @@ trait SigmaTestingCommons extends PropSpec val ergoCtx = ErgoLikeContextTesting.dummy(createBox(0, TrueProp)) .withBindings(1.toByte -> Constant[SType](x.asInstanceOf[SType#WrappedType], tpeA)) .withBindings(bindings: _*) - val calcCtx = ergoCtx.toSigmaContext(IR, isCost = false).asInstanceOf[CostingDataContext] + val calcCtx = ergoCtx.toSigmaContext(isCost = false).asInstanceOf[CostingDataContext] val costCtx = calcCtx.copy(isCost = true) (costCtx, calcCtx) } diff --git a/sigmastate/src/test/scala/special/sigma/SigmaDslSpecification.scala b/sigmastate/src/test/scala/special/sigma/SigmaDslSpecification.scala index 86b996700f..71e16fb564 100644 --- a/sigmastate/src/test/scala/special/sigma/SigmaDslSpecification.scala +++ b/sigmastate/src/test/scala/special/sigma/SigmaDslSpecification.scala @@ -2341,7 +2341,7 @@ class SigmaDslSpecification extends SigmaDslTesting { suite => // doApply((CFunc[Int, Int](ctx, code), 10)) // } - lazy val ctx = ergoCtx.toSigmaContext(IR, false) + lazy val ctx = ergoCtx.toSigmaContext(false) property("Box properties equivalence") { val b1 = CostingBox( diff --git a/sigmastate/src/test/scala/special/sigma/SigmaDslTesting.scala b/sigmastate/src/test/scala/special/sigma/SigmaDslTesting.scala index 2d27ab1523..80e9eb0c8c 100644 --- a/sigmastate/src/test/scala/special/sigma/SigmaDslTesting.scala +++ b/sigmastate/src/test/scala/special/sigma/SigmaDslTesting.scala @@ -430,7 +430,7 @@ class SigmaDslTesting extends PropSpec FeatureTest(AddedFeature, script, scalaFunc, Option(expectedExpr), oldImpl, newImpl) } - val contextGen: Gen[Context] = ergoLikeContextGen.map(c => c.toSigmaContext(createIR(), isCost = false)) + val contextGen: Gen[Context] = ergoLikeContextGen.map(c 
=> c.toSigmaContext(isCost = false)) implicit val arbContext: Arbitrary[Context] = Arbitrary(contextGen) /** NOTE, this should be `def` to allow overriding of generatorDrivenConfig in derived Spec classes. */ From 840613f0c5b3bcb3fb0d7d3859663f5b0689437f Mon Sep 17 00:00:00 2001 From: Alexander Slesarenko Date: Mon, 28 Sep 2020 16:17:46 +0300 Subject: [PATCH 02/19] memory bound IRContext: removing old CrowdfundingBenchmark code --- .../benchmarks/CrowdFundingContract.scala | 26 ------ .../CrowdFundingKernelContract.scala | 86 ------------------- .../CrowdFundingScriptContract.scala | 54 ------------ .../benchmarks/CrowdfundingBenchmark.scala | 82 ------------------ 4 files changed, 248 deletions(-) delete mode 100644 sigmastate/src/test/scala/sigmastate/utxo/benchmarks/CrowdFundingContract.scala delete mode 100644 sigmastate/src/test/scala/sigmastate/utxo/benchmarks/CrowdFundingKernelContract.scala delete mode 100644 sigmastate/src/test/scala/sigmastate/utxo/benchmarks/CrowdFundingScriptContract.scala delete mode 100644 sigmastate/src/test/scala/sigmastate/utxo/benchmarks/CrowdfundingBenchmark.scala diff --git a/sigmastate/src/test/scala/sigmastate/utxo/benchmarks/CrowdFundingContract.scala b/sigmastate/src/test/scala/sigmastate/utxo/benchmarks/CrowdFundingContract.scala deleted file mode 100644 index 1ef6cc5102..0000000000 --- a/sigmastate/src/test/scala/sigmastate/utxo/benchmarks/CrowdFundingContract.scala +++ /dev/null @@ -1,26 +0,0 @@ -package sigmastate.utxo.benchmarks - -import org.ergoplatform.ErgoLikeContext -import sigmastate.helpers.{ContextEnrichingTestProvingInterpreter, ErgoLikeTestInterpreter} -import sigmastate.interpreter.Interpreter -import sigmastate.utxo.SigmaContract - -import scala.util.Try - -abstract class CrowdFundingContract( - val timeout: Int, - val minToRaise: Long, - val backerProver: ContextEnrichingTestProvingInterpreter, - val projectProver: ContextEnrichingTestProvingInterpreter -) extends SigmaContract { - //a blockchain node 
verifying a block containing a spending transaction - val verifier = new ErgoLikeTestInterpreter()(backerProver.IR) - val backerPubKey = backerProver.dlogSecrets.head.publicImage - val projectPubKey = projectProver.dlogSecrets.head.publicImage - - def prove(ctx: ErgoLikeContext, fakeMessage: Array[Byte]): Array[Byte] - - def verify(proof: Array[Byte], - ctx: ErgoLikeContext, - fakeMessage: Array[Byte]): Try[Interpreter.VerificationResult] -} diff --git a/sigmastate/src/test/scala/sigmastate/utxo/benchmarks/CrowdFundingKernelContract.scala b/sigmastate/src/test/scala/sigmastate/utxo/benchmarks/CrowdFundingKernelContract.scala deleted file mode 100644 index 8cd0722134..0000000000 --- a/sigmastate/src/test/scala/sigmastate/utxo/benchmarks/CrowdFundingKernelContract.scala +++ /dev/null @@ -1,86 +0,0 @@ -package sigmastate.utxo.benchmarks - -import java.math.BigInteger -import java.util - -import org.ergoplatform.ErgoLikeContext -import sigmastate.basics.DLogProtocol.{DLogInteractiveProver, DLogProverInput, FirstDLogProverMessage, ProveDlog} -import sigmastate.basics.VerifierMessage.Challenge -import scorex.crypto.hash.Blake2b256 -import sigmastate._ -import sigmastate.helpers.ContextEnrichingTestProvingInterpreter -import sigmastate.interpreter.{CryptoConstants, Interpreter} -import sigmastate.utils.Helpers - -import scala.util.Try - -class CrowdFundingKernelContract( - timeout: Int, - minToRaise: Long, - override val backerProver: ContextEnrichingTestProvingInterpreter, - override val projectProver: ContextEnrichingTestProvingInterpreter -) extends CrowdFundingContract(timeout, minToRaise, backerProver, projectProver) { - - def isProven(pubKey: ProveDlog, message: Array[Byte]): projectProver.ProofT = { - import projectProver._ - var su = UnprovenSchnorr(pubKey, None, None, None, simulated = false) - val secret = secrets.find { - case in: DLogProverInput => in.publicImage == pubKey - case _ => false - } - val secretKnown = secret.isDefined - val simulated = 
!secretKnown - val step4: UnprovenTree = if (simulated) { - assert(su.challengeOpt.isDefined) - DLogInteractiveProver.simulate(su.proposition,su.challengeOpt.get).asInstanceOf[UnprovenTree] - } else { - val (r, commitment) = DLogInteractiveProver.firstMessage() - UnprovenSchnorr(pubKey, Some(commitment), Some(r), None, simulated = false) - } - - val commitments = step4 match { - case ul: UnprovenLeaf => ul.commitmentOpt.toSeq - case _ => ??? - /*case uc: UnprovenConjecture => uc.childrenCommitments*/ // can't do this anymore because internal nodes no longer have commitments - } - - val rootChallenge = Challenge @@ Blake2b256(Helpers.concatBytes(commitments.map(_.bytes) :+ message)) - - su = step4.asInstanceOf[UnprovenSchnorr] - val privKey = secret.get.asInstanceOf[DLogProverInput] - val z = DLogInteractiveProver.secondMessage(privKey, su.randomnessOpt.get, rootChallenge) - UncheckedSchnorr(su.proposition, None, rootChallenge, z) - } - - def prove(ctx: ErgoLikeContext, message: Array[Byte]): Array[Byte] = { - val c1 = ctx.preHeader.height >= timeout //&& isProven(backerPubKey, fakeMessage) - val c2 = Array( - ctx.preHeader.height < timeout, - ctx.spendingTransaction.outputs.exists(out => { - out.value >= minToRaise && - util.Arrays.equals(out.propositionBytes, projectPubKey.toSigmaProp.treeWithSegregation.bytes) - }) - ).forall(identity) - var proof: projectProver.ProofT = null - c1 || (c2 && { proof = isProven(projectPubKey, message); true}) - SigSerializer.toBytes(proof) - } - - def verify(proof: Array[Byte], - ctx: ErgoLikeContext, - message: Array[Byte]): Try[Interpreter.VerificationResult] = Try { - val sn = proof.asInstanceOf[UncheckedSchnorr] - val dlog = CryptoConstants.dlogGroup - val g = dlog.generator - val h = sn.proposition.h - - val a = dlog.multiplyGroupElements( - dlog.exponentiate(g, sn.secondMessage.z.underlying()), - dlog.inverseOf(dlog.exponentiate(h, new BigInteger(1, sn.challenge)))) - - val rootCommitment = FirstDLogProverMessage(a) - - val 
expectedChallenge = Blake2b256(Helpers.concatBytes(Seq(rootCommitment.bytes, message))) - util.Arrays.equals(sn.challenge, expectedChallenge) -> 0 - } -} diff --git a/sigmastate/src/test/scala/sigmastate/utxo/benchmarks/CrowdFundingScriptContract.scala b/sigmastate/src/test/scala/sigmastate/utxo/benchmarks/CrowdFundingScriptContract.scala deleted file mode 100644 index 24057cca7f..0000000000 --- a/sigmastate/src/test/scala/sigmastate/utxo/benchmarks/CrowdFundingScriptContract.scala +++ /dev/null @@ -1,54 +0,0 @@ -package sigmastate.utxo.benchmarks - -import org.ergoplatform.ErgoLikeContext -import sigmastate.SBoolean -import sigmastate.Values.{Value, SigmaPropValue} -import sigmastate.helpers.ContextEnrichingTestProvingInterpreter -import sigmastate.interpreter.Interpreter -import sigmastate.interpreter.Interpreter._ -import sigmastate.lang.Terms._ - -import scala.util.Try - -class CrowdFundingScriptContract( - timeout: Int, - minToRaise: Long, - override val backerProver: ContextEnrichingTestProvingInterpreter, - override val projectProver: ContextEnrichingTestProvingInterpreter -) extends CrowdFundingContract(timeout, minToRaise, backerProver, projectProver) { - - val compiledProposition: SigmaPropValue = { - val env = Map( - "timeout" -> timeout, - "minToRaise" -> minToRaise, - "backerPubKey" -> backerPubKey, - "projectPubKey" -> projectPubKey - ) - val compiledScript = compiler.compileWithoutCosting(env, - """{ - | val c1 = HEIGHT >= timeout && backerPubKey - | val c2 = allOf(Coll( - | HEIGHT < timeout, - | projectPubKey, - | OUTPUTS.exists({ (out: Box) => - | out.value >= minToRaise && out.propositionBytes == projectPubKey.propBytes - | }) - | )) - | c1 || c2 - | } - """.stripMargin).asSigmaProp - compiledScript - } - - def prove(ctx: ErgoLikeContext, fakeMessage: Array[Byte]): Array[Byte] = { - val proofP = projectProver.prove(compiledProposition, ctx, fakeMessage).get.proof - proofP - } - - def verify(proof: Array[Byte], - ctx: ErgoLikeContext, - 
fakeMessage: Array[Byte]): Try[Interpreter.VerificationResult] = { - val res = verifier.verify(emptyEnv, compiledProposition, ctx, proof, fakeMessage) - res - } -} diff --git a/sigmastate/src/test/scala/sigmastate/utxo/benchmarks/CrowdfundingBenchmark.scala b/sigmastate/src/test/scala/sigmastate/utxo/benchmarks/CrowdfundingBenchmark.scala deleted file mode 100644 index 26f1948312..0000000000 --- a/sigmastate/src/test/scala/sigmastate/utxo/benchmarks/CrowdfundingBenchmark.scala +++ /dev/null @@ -1,82 +0,0 @@ -package sigmastate.utxo.benchmarks - - -import org.ergoplatform.{ErgoLikeContext, ErgoScriptPredef} -import sigmastate.Values._ -import sigmastate._ -import sigmastate.helpers.{ContextEnrichingTestProvingInterpreter, ErgoLikeContextTesting, SigmaTestingCommons} -import sigmastate.helpers.TestingHelpers._ -import scalan.util.BenchmarkUtil._ - -class CrowdfundingBenchmark extends SigmaTestingCommons { - implicit lazy val IR = new TestingIRContext - def createTestContext(contract: CrowdFundingContract): ErgoLikeContext = { - val outputToSpend = testBox(10, ErgoScriptPredef.TrueProp, 0) - //First case: height < timeout, project is able to claim amount of tokens not less than required threshold - val tx1Output1 = testBox(contract.minToRaise, contract.projectPubKey, 0) - val tx1Output2 = testBox(1, contract.projectPubKey, 0) - //normally this transaction would invalid, but we're not checking it in this test - val tx = createTransaction(IndexedSeq(tx1Output1, tx1Output2)) - val ctx = ErgoLikeContextTesting( - currentHeight = contract.timeout - 1, // HEIGHT < timeout, - lastBlockUtxoRoot = AvlTreeData.dummy, - minerPubkey = ErgoLikeContextTesting.dummyPubkey, - boxesToSpend = IndexedSeq(), - spendingTransaction = tx, - self = outputToSpend) - ctx - } - - val timeout = 100 - val minToRaise = 1000L - val nIters = 10000 - val nTasks = 1 - - ignore("Evaluation by Precompiled Kernel(!!! 
ignore)") { - runTasks(nTasks) { iTask => - //backer's prover with his private key - val backerProver = new ContextEnrichingTestProvingInterpreter - //project's prover with his private key - val projectProver = new ContextEnrichingTestProvingInterpreter - val contract = new CrowdFundingKernelContract(timeout, minToRaise, backerProver, projectProver) - val ctx = createTestContext(contract) - - val (ok, time) = measureTime { - var res = true - for (_ <- 1 to nIters) { - val proof = contract.prove(ctx, fakeMessage) - res = contract.verify(proof, ctx, fakeMessage).get._1 - res shouldBe true - } - res - } - ok shouldBe true - println(s"Task $iTask: Thread ${Thread.currentThread().getId}: Completed $nIters iterations in $time msec") - } - } - - ignore("Evaluation by Script Interpretation(!!! ignore)") { - runTasks(nTasks) { iTask => - //backer's prover with his private key - val backerProver = new ContextEnrichingTestProvingInterpreter - //project's prover with his private key - val projectProver = new ContextEnrichingTestProvingInterpreter - val contract = new CrowdFundingScriptContract(timeout, minToRaise, backerProver, projectProver) - val ctx = createTestContext(contract) - - val (ok, time) = measureTime { - var res = true - for (_ <- 1 to nIters) { - val proof = contract.prove(ctx, fakeMessage) - res = contract.verify(proof, ctx, fakeMessage).get._1 - res shouldBe true - } - res - } - ok shouldBe true - println(s"Task $iTask: Thread ${Thread.currentThread().getId}: Completed $nIters iterations in $time msec") - } - } - - -} From 60d718ac5be94b05060861da1a7ca0c973f380c6 Mon Sep 17 00:00:00 2001 From: Alexander Slesarenko Date: Mon, 12 Oct 2020 10:18:42 +0300 Subject: [PATCH 03/19] more optimizations: avoid lookups for Monoids and Global in Evaluation --- .../scala/sigmastate/eval/Evaluation.scala | 27 +++++++++++++------ .../scala/sigmastate/eval/IRContext.scala | 2 +- .../sigmastate/interpreter/Interpreter.scala | 6 ++--- 3 files changed, 23 insertions(+), 12 
deletions(-) diff --git a/sigmastate/src/main/scala/sigmastate/eval/Evaluation.scala b/sigmastate/src/main/scala/sigmastate/eval/Evaluation.scala index c1a6fe3d54..83adedde85 100644 --- a/sigmastate/src/main/scala/sigmastate/eval/Evaluation.scala +++ b/sigmastate/src/main/scala/sigmastate/eval/Evaluation.scala @@ -451,8 +451,6 @@ trait Evaluation extends RuntimeCosting { IR: IRContext => case _ => error(s"Cannot find value in environment for $s (dataEnv = $dataEnv)") } - def msgCostLimitError(cost: Long, limit: Long) = s"Estimated execution cost $cost exceeds the limit $limit" - /** Incapsulate simple monotonic (add only) counter with reset. */ class CostCounter(val initialCost: Int) { private var _currentCost: Int = initialCost @@ -558,7 +556,7 @@ trait Evaluation extends RuntimeCosting { IR: IRContext => // if (cost < limit) // println(s"FAIL FAST in loop: $accumulatedCost > $limit") // TODO cover with tests - throw new CostLimitException(accumulatedCost, msgCostLimitError(accumulatedCost, limit), None) + throw new CostLimitException(accumulatedCost, Evaluation.msgCostLimitError(accumulatedCost, limit), None) } } @@ -635,11 +633,22 @@ trait Evaluation extends RuntimeCosting { IR: IRContext => case wc: LiftedConst[_,_] => out(wc.constValue) - case _: SigmaDslBuilder | _: CollBuilder | _: CostedBuilder | - _: WSpecialPredefCompanion | - _: IntPlusMonoid | _: LongPlusMonoid | - MBM.intPlusMonoid(_) | MBM.longPlusMonoid(_) => // TODO no HF proof - out(dataEnv.getOrElse(sym, !!!(s"Cannot resolve companion instance for $sym -> ${sym.node}"))) + case _: IntPlusMonoid | MBM.intPlusMonoid(_) => + // always return the same value since monoids are singletons + out(monoidBuilderValue.intPlusMonoid) + + case _: LongPlusMonoid | MBM.longPlusMonoid(_) => + // always return the same value since monoids are singletons + out(monoidBuilderValue.longPlusMonoid) + + case _: SigmaDslBuilder => + // always return the same value since SigmaDslBuilder is singleton + 
out(sigmaDslBuilderValue) + + case _: CollBuilder | _: CostedBuilder | _: WSpecialPredefCompanion => + out(dataEnv.getOrElse(sym, { + !!!(s"Cannot resolve companion instance for $sym -> ${sym.node}") + })) case SigmaM.isValid(In(prop: AnyRef)) => out(prop) @@ -853,6 +862,8 @@ object Evaluation { import special.sigma._ import special.collection._ + def msgCostLimitError(cost: Long, limit: Long) = s"Estimated execution cost $cost exceeds the limit $limit" + /** Transforms a serializable ErgoTree type descriptor to the corresponding RType descriptor of SigmaDsl, * which is used during evaluation. */ diff --git a/sigmastate/src/main/scala/sigmastate/eval/IRContext.scala b/sigmastate/src/main/scala/sigmastate/eval/IRContext.scala index 3f5b27b762..9d24faec88 100644 --- a/sigmastate/src/main/scala/sigmastate/eval/IRContext.scala +++ b/sigmastate/src/main/scala/sigmastate/eval/IRContext.scala @@ -131,7 +131,7 @@ trait IRContext extends Evaluation with TreeBuilding { val totalCost = JMath.addExact(initCost, scaledCost) if (totalCost > maxCost) { // TODO cover with tests - throw new CostLimitException(totalCost, msgCostLimitError(totalCost, maxCost), None) + throw new CostLimitException(totalCost, Evaluation.msgCostLimitError(totalCost, maxCost), None) } totalCost.toInt } diff --git a/sigmastate/src/main/scala/sigmastate/interpreter/Interpreter.scala b/sigmastate/src/main/scala/sigmastate/interpreter/Interpreter.scala index 61a5378fa4..619505720a 100644 --- a/sigmastate/src/main/scala/sigmastate/interpreter/Interpreter.scala +++ b/sigmastate/src/main/scala/sigmastate/interpreter/Interpreter.scala @@ -10,7 +10,7 @@ import sigmastate.basics.DLogProtocol.{DLogInteractiveProver, FirstDLogProverMes import scorex.util.ScorexLogging import sigmastate.SCollection.SByteArray import sigmastate.Values._ -import sigmastate.eval.{IRContext, Sized} +import sigmastate.eval.{IRContext, Sized, Evaluation} import sigmastate.lang.Terms.ValueOps import sigmastate.basics._ import 
sigmastate.interpreter.Interpreter.{ScriptEnv, VerificationResult} @@ -53,7 +53,7 @@ trait Interpreter extends ScorexLogging { val currCost = JMath.addExact(context.initCost, scriptComplexity) val remainingLimit = context.costLimit - currCost if (remainingLimit <= 0) - throw new CostLimitException(currCost, msgCostLimitError(currCost, context.costLimit), None) // TODO cover with tests + throw new CostLimitException(currCost, Evaluation.msgCostLimitError(currCost, context.costLimit), None) // TODO cover with tests val ctx1 = context.withInitCost(currCost).asInstanceOf[CTX] (ctx1, script) @@ -232,7 +232,7 @@ trait Interpreter extends ScorexLogging { val initCost = JMath.addExact(ergoTree.complexity.toLong, context.initCost) val remainingLimit = context.costLimit - initCost if (remainingLimit <= 0) - throw new CostLimitException(initCost, msgCostLimitError(initCost, context.costLimit), None) // TODO cover with tests + throw new CostLimitException(initCost, Evaluation.msgCostLimitError(initCost, context.costLimit), None) // TODO cover with tests val contextWithCost = context.withInitCost(initCost).asInstanceOf[CTX] From 85e8ca13bfd250bb1ab64bcf73f05bb68679f297 Mon Sep 17 00:00:00 2001 From: Alexander Slesarenko Date: Wed, 30 Sep 2020 16:36:59 +0300 Subject: [PATCH 04/19] memory bound IRContext: optimize initializaiton of new IRContext instances --- .../scala/sigmastate/eval/Evaluation.scala | 167 +++++++++--------- 1 file changed, 84 insertions(+), 83 deletions(-) diff --git a/sigmastate/src/main/scala/sigmastate/eval/Evaluation.scala b/sigmastate/src/main/scala/sigmastate/eval/Evaluation.scala index 83adedde85..87eff10139 100644 --- a/sigmastate/src/main/scala/sigmastate/eval/Evaluation.scala +++ b/sigmastate/src/main/scala/sigmastate/eval/Evaluation.scala @@ -81,94 +81,12 @@ trait Evaluation extends RuntimeCosting { IR: IRContext => private val SPCM = WSpecialPredefCompanionMethods private val MBM = MonoidBuilderMethods - private val _allowedOpCodesInCosting: 
HashSet[OpCodeExtra] = HashSet[OpCode]( - AppendCode, - ByIndexCode, - ConstantCode, - DivisionCode, - DowncastCode, - ExtractBytesWithNoRefCode, - ExtractRegisterAs, - ExtractScriptBytesCode, - FoldCode, - FuncApplyCode, - FuncValueCode, - GetVarCode, - InputsCode, - LastBlockUtxoRootHashCode, - MapCollectionCode, - FlatMapCollectionCode, - MaxCode, - MethodCallCode, - MinCode, - MinusCode, - ModuloCode, - MultiplyCode, - OptionGetCode, - OptionGetOrElseCode, - OptionIsDefinedCode, - OutputsCode, - PlusCode, - SelectFieldCode, - SelfCode, - SigmaPropBytesCode, - SizeOfCode, - SliceCode, - TupleCode, - UpcastCode - ).map(toExtra) ++ HashSet[OpCodeExtra]( - OpCostCode, - PerKbCostOfCode, - CastCode, - IntPlusMonoidCode, - ThunkDefCode, - ThunkForceCode, - SCMInputsCode, - SCMOutputsCode, - SCMDataInputsCode, - SCMSelfBoxCode, - SCMLastBlockUtxoRootHashCode, - SCMHeadersCode, - SCMPreHeaderCode, - SCMGetVarCode, - SBMPropositionBytesCode, - SBMBytesCode, - SBMBytesWithoutRefCode, - SBMRegistersCode, - SBMGetRegCode, - SBMTokensCode, - SSPMPropBytesCode, - SAVMTValCode, - SAVMValueSizeCode, - SizeMDataSizeCode, - SPairLCode, - SPairRCode, - SCollMSizesCode, - SOptMSizeOptCode, - SFuncMSizeEnvCode, - CSizePairCtorCode, - CSizeFuncCtorCode, - CSizeOptionCtorCode, - CSizeCollCtorCode, - CSizeBoxCtorCode, - CSizeContextCtorCode, - CSizeAnyValueCtorCode, - CReplCollCtorCode, - PairOfColsCtorCode, - CollMSumCode, - CBMReplicateCode, - CBMFromItemsCode, - CostOfCode, - UOSizeOfCode, - SPCMSomeCode - ) - /** Returns a set of opCodeEx values (extended op codes) which are allowed in cost function. * This may include both ErgoTree codes (from OpCodes) and also additional non-ErgoTree codes * from OpCodesExtra. * Any IR graph node can be uniquely assigned to extended op code value * from OpCodes + OpCodesExtra combined range. 
(See getOpCodeEx) */ - protected def allowedOpCodesInCosting: HashSet[OpCodeExtra] = _allowedOpCodesInCosting + protected def allowedOpCodesInCosting: HashSet[OpCodeExtra] = Evaluation.AllowedOpCodesInCosting def isAllowedOpCodeInCosting(opCode: OpCodeExtra): Boolean = allowedOpCodesInCosting.contains(opCode) @@ -861,6 +779,89 @@ trait Evaluation extends RuntimeCosting { IR: IRContext => object Evaluation { import special.sigma._ import special.collection._ + import OpCodes._ + + val AllowedOpCodesInCosting: HashSet[OpCodeExtra] = HashSet[OpCode]( + AppendCode, + ByIndexCode, + ConstantCode, + DivisionCode, + DowncastCode, + ExtractBytesWithNoRefCode, + ExtractRegisterAs, + ExtractScriptBytesCode, + FoldCode, + FuncApplyCode, + FuncValueCode, + GetVarCode, + InputsCode, + LastBlockUtxoRootHashCode, + MapCollectionCode, + FlatMapCollectionCode, + MaxCode, + MethodCallCode, + MinCode, + MinusCode, + ModuloCode, + MultiplyCode, + OptionGetCode, + OptionGetOrElseCode, + OptionIsDefinedCode, + OutputsCode, + PlusCode, + SelectFieldCode, + SelfCode, + SigmaPropBytesCode, + SizeOfCode, + SliceCode, + TupleCode, + UpcastCode + ).map(toExtra) ++ HashSet[OpCodeExtra]( + OpCostCode, + PerKbCostOfCode, + CastCode, + IntPlusMonoidCode, + ThunkDefCode, + ThunkForceCode, + SCMInputsCode, + SCMOutputsCode, + SCMDataInputsCode, + SCMSelfBoxCode, + SCMLastBlockUtxoRootHashCode, + SCMHeadersCode, + SCMPreHeaderCode, + SCMGetVarCode, + SBMPropositionBytesCode, + SBMBytesCode, + SBMBytesWithoutRefCode, + SBMRegistersCode, + SBMGetRegCode, + SBMTokensCode, + SSPMPropBytesCode, + SAVMTValCode, + SAVMValueSizeCode, + SizeMDataSizeCode, + SPairLCode, + SPairRCode, + SCollMSizesCode, + SOptMSizeOptCode, + SFuncMSizeEnvCode, + CSizePairCtorCode, + CSizeFuncCtorCode, + CSizeOptionCtorCode, + CSizeCollCtorCode, + CSizeBoxCtorCode, + CSizeContextCtorCode, + CSizeAnyValueCtorCode, + CReplCollCtorCode, + PairOfColsCtorCode, + CollMSumCode, + CBMReplicateCode, + CBMFromItemsCode, + CostOfCode, + 
UOSizeOfCode, + SPCMSomeCode + ) def msgCostLimitError(cost: Long, limit: Long) = s"Estimated execution cost $cost exceeds the limit $limit" From a709fa4f510dec50debae5ec80b7b3ba837b3018 Mon Sep 17 00:00:00 2001 From: Alexander Slesarenko Date: Mon, 12 Oct 2020 10:43:26 +0300 Subject: [PATCH 05/19] more optimizations: using cfor + shared empty arrays --- sigmastate/src/main/scala/sigmastate/Values.scala | 11 +++++++++++ .../main/scala/sigmastate/eval/TreeBuilding.scala | 4 ++-- .../src/main/scala/sigmastate/lang/Terms.scala | 14 ++++++++++++-- .../serialization/BlockValueSerializer.scala | 12 +++++++++++- .../serialization/ErgoTreeSerializer.scala | 15 ++++++++++----- sigmastate/src/main/scala/sigmastate/trees.scala | 10 +++++++--- 6 files changed, 53 insertions(+), 13 deletions(-) diff --git a/sigmastate/src/main/scala/sigmastate/Values.scala b/sigmastate/src/main/scala/sigmastate/Values.scala index cb029de642..268b3c3a95 100644 --- a/sigmastate/src/main/scala/sigmastate/Values.scala +++ b/sigmastate/src/main/scala/sigmastate/Values.scala @@ -183,6 +183,10 @@ object Values { object Constant extends ValueCompanion { override def opCode: OpCode = ConstantCode + + /** Immutable empty array to save allocations in many places. */ + val EmptyArray = Array.empty[Constant[SType]] + def apply[S <: SType](value: S#WrappedType, tpe: S): Constant[S] = ConstantNode(value, tpe) def unapply[S <: SType](v: EvaluatedValue[S]): Option[(S#WrappedType, S)] = v match { case ConstantNode(value, tpe) => Some((value, tpe)) @@ -751,6 +755,13 @@ object Values { def rhs: SValue def isValDef: Boolean } + object BlockItem { + /** Immutable empty array to save allocations in many places. */ + val EmptyArray = Array.empty[BlockItem] + + /** Immutable empty IndexedSeq to save allocations in many places. */ + val EmptySeq: IndexedSeq[BlockItem] = EmptyArray + } /** IR node for let-bound expressions `let x = rhs` which is ValDef, or `let f[T] = rhs` which is FunDef. 
* These nodes are used to represent ErgoTrees after common sub-expression elimination. diff --git a/sigmastate/src/main/scala/sigmastate/eval/TreeBuilding.scala b/sigmastate/src/main/scala/sigmastate/eval/TreeBuilding.scala index 92e4018a96..dc3fc73cc3 100644 --- a/sigmastate/src/main/scala/sigmastate/eval/TreeBuilding.scala +++ b/sigmastate/src/main/scala/sigmastate/eval/TreeBuilding.scala @@ -1,7 +1,7 @@ package sigmastate.eval -import sigmastate.Values.{BlockValue, BoolValue, Constant, ConstantNode, SValue, SigmaPropConstant, ValDef, ValUse, Value} +import sigmastate.Values.{BlockItem, BlockValue, BoolValue, Constant, ConstantNode, SValue, SigmaPropConstant, ValDef, ValUse, Value} import org.ergoplatform._ import org.ergoplatform.{Height, Inputs, Outputs, Self} @@ -422,7 +422,7 @@ trait TreeBuilding extends RuntimeCosting { IR: IRContext => } val Seq(root) = subG.roots val rhs = buildValue(ctx, mainG, curEnv, root, curId, constantsProcessing) - val res = if (valdefs.nonEmpty) BlockValue(valdefs.toIndexedSeq, rhs) else rhs + val res = if (valdefs.nonEmpty) BlockValue(valdefs.toArray[BlockItem], rhs) else rhs res } diff --git a/sigmastate/src/main/scala/sigmastate/lang/Terms.scala b/sigmastate/src/main/scala/sigmastate/lang/Terms.scala index 1e4d86af83..962fdde3a7 100644 --- a/sigmastate/src/main/scala/sigmastate/lang/Terms.scala +++ b/sigmastate/src/main/scala/sigmastate/lang/Terms.scala @@ -11,6 +11,8 @@ import sigmastate.serialization.OpCodes.OpCode import sigmastate.lang.TransformingSigmaBuilder._ import scala.language.implicitConversions +import scala.collection.mutable.WrappedArray +import spire.syntax.all.cfor object Terms { @@ -99,7 +101,7 @@ object Terms { * compilation environment value. 
*/ case class Ident(name: String, tpe: SType = NoType) extends Value[SType] { override def companion = Ident - override def opType: SFunc = SFunc(Vector(), tpe) + override def opType: SFunc = SFunc(WrappedArray.empty, tpe) } object Ident extends ValueCompanion { override def opCode: OpCode = OpCodes.Undefined @@ -114,7 +116,15 @@ object Terms { case tColl: SCollectionType[_] => tColl.elemType case _ => NoType } - override def opType: SFunc = SFunc(Vector(func.tpe +: args.map(_.tpe):_*), tpe) + override lazy val opType: SFunc = { + val nArgs = args.length + val argTypes = new Array[SType](nArgs + 1) + argTypes(0) = func.tpe + cfor(0)(_ < nArgs, _ + 1) { i => + argTypes(i + 1) = args(i).tpe + } + SFunc(argTypes, tpe) + } } object Apply extends ValueCompanion { override def opCode: OpCode = OpCodes.FuncApplyCode diff --git a/sigmastate/src/main/scala/sigmastate/serialization/BlockValueSerializer.scala b/sigmastate/src/main/scala/sigmastate/serialization/BlockValueSerializer.scala index b595af5740..04b05ea70d 100644 --- a/sigmastate/src/main/scala/sigmastate/serialization/BlockValueSerializer.scala +++ b/sigmastate/src/main/scala/sigmastate/serialization/BlockValueSerializer.scala @@ -6,6 +6,7 @@ import scorex.util.Extensions._ import sigmastate.utils.{SigmaByteReader, SigmaByteWriter} import ValueSerializer._ import sigmastate.utils.SigmaByteWriter.{Vlq, U, DataInfo} +import spire.syntax.all.cfor case class BlockValueSerializer(cons: (IndexedSeq[BlockItem], Value[SType]) => Value[SType]) extends ValueSerializer[BlockValue] { @@ -24,7 +25,16 @@ case class BlockValueSerializer(cons: (IndexedSeq[BlockItem], Value[SType]) => V override def parse(r: SigmaByteReader): Value[SType] = { val itemsSize = r.getUInt().toIntExact - val values = (1 to itemsSize).map(_ => r.getValue().asInstanceOf[BlockItem]) + val values: IndexedSeq[BlockItem] = if (itemsSize == 0) + BlockItem.EmptySeq + else { + // @hotspot: allocate new array only if it is not empty + val buf = new 
Array[BlockItem](itemsSize) + cfor(0)(_ < itemsSize, _ + 1) { i => + buf(i) = r.getValue().asInstanceOf[BlockItem] + } + buf + } val result = r.getValue() cons(values, result) } diff --git a/sigmastate/src/main/scala/sigmastate/serialization/ErgoTreeSerializer.scala b/sigmastate/src/main/scala/sigmastate/serialization/ErgoTreeSerializer.scala index 719cb576cc..88959ccc2d 100644 --- a/sigmastate/src/main/scala/sigmastate/serialization/ErgoTreeSerializer.scala +++ b/sigmastate/src/main/scala/sigmastate/serialization/ErgoTreeSerializer.scala @@ -213,14 +213,19 @@ class ErgoTreeSerializer { private def deserializeConstants(header: Byte, r: SigmaByteReader): Array[Constant[SType]] = { val constants = if (ErgoTree.isConstantSegregation(header)) { val nConsts = r.getUInt().toInt - val res = new Array[Constant[SType]](nConsts) - cfor(0)(_ < nConsts, _ + 1) { i => - res(i) = constantSerializer.deserialize(r) + if (nConsts > 0) { + // @hotspot: allocate new array only if it is not empty + val res = new Array[Constant[SType]](nConsts) + cfor(0)(_ < nConsts, _ + 1) { i => + res(i) = constantSerializer.deserialize(r) + } + res } - res + else + Constant.EmptyArray } else - Array.empty[Constant[SType]] + Constant.EmptyArray constants } diff --git a/sigmastate/src/main/scala/sigmastate/trees.scala b/sigmastate/src/main/scala/sigmastate/trees.scala index eb56661cd2..159d96030f 100644 --- a/sigmastate/src/main/scala/sigmastate/trees.scala +++ b/sigmastate/src/main/scala/sigmastate/trees.scala @@ -15,7 +15,7 @@ import sigmastate.utxo.{Transformer, SimpleTransformerCompanion} import scala.collection.mutable import scala.collection.mutable.ArrayBuffer - +import spire.syntax.all.cfor /** * Basic trait for inner nodes of crypto-trees, so AND/OR/THRESHOLD sigma-protocol connectives @@ -44,7 +44,9 @@ object CAND { def normalized(items: Seq[SigmaBoolean]): SigmaBoolean = { require(items.nonEmpty) val res = new ArrayBuffer[SigmaBoolean]() - for (x <- items) { + val nItems = items.length + 
cfor(0)(_ < nItems, _ + 1) { i => + val x = items(i) x match { case FalseProp => return FalseProp case TrueProp => // skip @@ -70,7 +72,9 @@ object COR { def normalized(items: Seq[SigmaBoolean]): SigmaBoolean = { require(items.nonEmpty) val res = new ArrayBuffer[SigmaBoolean]() - for (x <- items) { + val nItems = items.length + cfor(0)(_ < nItems, _ + 1) { i => + val x = items(i) x match { case FalseProp => // skip case TrueProp => return TrueProp From 48cc739a77ca884456ed56759ce7b09f816a140b Mon Sep 17 00:00:00 2001 From: Alexander Slesarenko Date: Sat, 3 Oct 2020 00:18:00 +0300 Subject: [PATCH 06/19] memory bound IRContext: avoid duplicate and empty array allocations --- core/src/main/scala/scalan/Base.scala | 15 +++- .../main/scala/scalan/primitives/Equal.scala | 8 +- .../scala/scalan/primitives/Functions.scala | 11 +-- .../scala/scalan/primitives/LogicalOps.scala | 20 +++-- .../scala/scalan/primitives/NumericOps.scala | 46 +++++++--- .../scala/scalan/primitives/OrderingOps.scala | 28 ++++-- .../main/scala/scalan/primitives/Thunks.scala | 10 ++- .../scala/scalan/primitives/UnBinOps.scala | 15 +++- .../scalan/primitives/UniversalOps.scala | 8 +- .../main/scala/scalan/staged/AstGraphs.scala | 56 +++++++----- .../main/scala/special/sigma/TestBigInt.scala | 2 +- .../special/sigma/TestGroupElement.scala | 2 +- .../scala/org/ergoplatform/ErgoAddress.scala | 2 +- .../src/main/scala/sigmastate/Values.scala | 17 +++- .../sigmastate/eval/RuntimeCosting.scala | 90 ++++++++++++++----- .../src/main/scala/sigmastate/trees.scala | 4 +- .../scala/sigmastate/utxo/transformers.scala | 6 +- .../scala/special/sigma/SigmaDslTesting.scala | 2 +- 18 files changed, 245 insertions(+), 97 deletions(-) diff --git a/core/src/main/scala/scalan/Base.scala b/core/src/main/scala/scalan/Base.scala index 75b0b0a5de..4ff59033de 100644 --- a/core/src/main/scala/scalan/Base.scala +++ b/core/src/main/scala/scalan/Base.scala @@ -92,7 +92,7 @@ abstract class Base { scalan: Scalan => _elements(i + 1) = 
element Def.extractSyms(element, symsBuf) } - _syms = symsBuf.toArray() + _syms = if (symsBuf.length > 0) symsBuf.toArray() else EmptyArrayOfSym } /** References to other nodes in this Def instance. @@ -815,5 +815,18 @@ abstract class Base { scalan: Scalan => } while (res != currSym) res } + + val EmptyArrayOfSym = Array.empty[Sym] + val EmptySeqOfSym: Seq[Sym] = EmptyArrayOfSym + def EmptyDBufferOfSym: DBuffer[Sym] = DBuffer.unsafe(EmptyArrayOfSym) } +object Base { + val EmptyArrayOfInt = Array.empty[Int] + val EmptySeqOfInt: Seq[Int] = EmptyArrayOfInt + val EmptyDSetOfInt: debox.Set[Int] = debox.Set.empty + + /** WARNING! Since it is mutable, special care should be taken to not change this buffer. + * @hotspot used heavily in scheduling and to avoid allocations*/ + def EmptyDBufferOfInt: debox.Buffer[Int] = debox.Buffer.unsafe(EmptyArrayOfInt) +} diff --git a/core/src/main/scala/scalan/primitives/Equal.scala b/core/src/main/scala/scalan/primitives/Equal.scala index 32dc525f89..e2a35acf15 100644 --- a/core/src/main/scala/scalan/primitives/Equal.scala +++ b/core/src/main/scala/scalan/primitives/Equal.scala @@ -4,10 +4,14 @@ import scalan.{Base, Scalan} trait Equal extends Base { self: Scalan => /** Binary operation representing structural equality between arguments. */ - case class Equals[A: Elem]() extends BinOp[A, Boolean]("==", equalValues[A](_, _)) + case class Equals[A: Elem]() extends BinOp[A, Boolean]("==") { + override def applySeq(x: A, y: A): Boolean = equalValues[A](x, y) + } /** Binary operation representing structural inequality between arguments. 
*/ - case class NotEquals[A: Elem]() extends BinOp[A, Boolean]("!=", !equalValues[A](_, _)) + case class NotEquals[A: Elem]() extends BinOp[A, Boolean]("!=") { + override def applySeq(x: A, y: A): Boolean = !equalValues[A](x, y) + } protected def equalValues[A](x: Any, y: Any)(implicit eA: Elem[A]) = x == y diff --git a/core/src/main/scala/scalan/primitives/Functions.scala b/core/src/main/scala/scalan/primitives/Functions.scala index 1fc9844344..fc75631ffe 100644 --- a/core/src/main/scala/scalan/primitives/Functions.scala +++ b/core/src/main/scala/scalan/primitives/Functions.scala @@ -120,7 +120,7 @@ trait Functions extends Base with ProgramGraphs { self: Scalan => override lazy val scheduleIds: DBuffer[Int] = { val sch = if (isIdentity) - DBuffer.ofSize[Int](0) + Base.EmptyDBufferOfInt else { // graph g will contain all Defs reified as part of this Lambda, (due to `filterNode`) // BUT not all of them depend on boundVars, thus we need to filter them out @@ -165,15 +165,6 @@ trait Functions extends Base with ProgramGraphs { self: Scalan => } override protected def getDeps: Array[Sym] = freeVars.toArray - - def isGlobalLambda: Boolean = { - freeVars.forall { x => - x.isConst || { - val xIsGlobalLambda = x.isLambda && { val lam = x.node.asInstanceOf[Lambda[_, _]]; lam.isGlobalLambda } - xIsGlobalLambda - } - } - } } type LambdaData[A,B] = (Lambda[A,B], Nullable[Ref[A] => Ref[B]], Ref[A], Ref[B]) diff --git a/core/src/main/scala/scalan/primitives/LogicalOps.scala b/core/src/main/scala/scalan/primitives/LogicalOps.scala index fd71800d95..881de678b6 100644 --- a/core/src/main/scala/scalan/primitives/LogicalOps.scala +++ b/core/src/main/scala/scalan/primitives/LogicalOps.scala @@ -3,15 +3,25 @@ package scalan.primitives import scalan.{Base, Scalan} trait LogicalOps extends Base { self: Scalan => - val And = new EndoBinOp[Boolean]("&&", _ && _) + val And = new EndoBinOp[Boolean]("&&") { + override def applySeq(x: Boolean, y: Boolean): Boolean = x && y + } - val Or = new 
EndoBinOp[Boolean]("||", _ || _) + val Or = new EndoBinOp[Boolean]("||") { + override def applySeq(x: Boolean, y: Boolean): Boolean = x || y + } - val Not = new EndoUnOp[Boolean]("!", !_) + val Not = new EndoUnOp[Boolean]("!") { + override def applySeq(x: Boolean): Boolean = !x + } - val BinaryXorOp = new EndoBinOp[Boolean]("^", _ ^ _) + val BinaryXorOp = new EndoBinOp[Boolean]("^") { + override def applySeq(x: Boolean, y: Boolean): Boolean = x ^ y + } - val BooleanToInt = new UnOp[Boolean, Int]("ToInt", if (_) 1 else 0) + val BooleanToInt = new UnOp[Boolean, Int]("ToInt") { + override def applySeq(x: Boolean): Int = if (x) 1 else 0 + } implicit class RepBooleanOps(value: Ref[Boolean]) { def &&(y: Ref[Boolean]): Ref[Boolean] = And(value, y) diff --git a/core/src/main/scala/scalan/primitives/NumericOps.scala b/core/src/main/scala/scalan/primitives/NumericOps.scala index 70216627d4..11a7c34c02 100644 --- a/core/src/main/scala/scalan/primitives/NumericOps.scala +++ b/core/src/main/scala/scalan/primitives/NumericOps.scala @@ -26,31 +26,53 @@ trait NumericOps extends Base { self: Scalan => def numeric[T:ExactNumeric]: ExactNumeric[T] = implicitly[ExactNumeric[T]] def integral[T:ExactIntegral]: ExactIntegral[T] = implicitly[ExactIntegral[T]] - case class NumericPlus[T: Elem](n: ExactNumeric[T]) extends EndoBinOp[T]("+", n.plus) + case class NumericPlus[T: Elem](n: ExactNumeric[T]) extends EndoBinOp[T]("+") { + override def applySeq(x: T, y: T): T = n.plus(x, y) + } - case class NumericMinus[T: Elem](n: ExactNumeric[T]) extends EndoBinOp[T]("-", n.minus) + case class NumericMinus[T: Elem](n: ExactNumeric[T]) extends EndoBinOp[T]("-") { + override def applySeq(x: T, y: T): T = n.minus(x, y) + } - case class NumericTimes[T: Elem](n: ExactNumeric[T]) extends EndoBinOp[T]("*", n.times) + case class NumericTimes[T: Elem](n: ExactNumeric[T]) extends EndoBinOp[T]("*") { + override def applySeq(x: T, y: T): T = n.times(x, y) + } - class DivOp[T: Elem](opName: String, applySeq: 
(T, T) => T, n: ExactIntegral[T]) extends EndoBinOp[T](opName, applySeq) { + abstract class DivOp[T: Elem](opName: String, n: ExactIntegral[T]) extends EndoBinOp[T](opName) { override def shouldPropagate(lhs: T, rhs: T) = rhs != n.zero } - case class NumericNegate[T: Elem](n: ExactNumeric[T]) extends UnOp[T, T]("-", n.negate) + case class NumericNegate[T: Elem](n: ExactNumeric[T]) extends UnOp[T, T]("-") { + override def applySeq(x: T): T = n.negate(x) + } - case class NumericToDouble[T](n: ExactNumeric[T]) extends UnOp[T,Double]("ToDouble", n.toDouble) + case class NumericToDouble[T](n: ExactNumeric[T]) extends UnOp[T,Double]("ToDouble") { + override def applySeq(x: T): Double = n.toDouble(x) + } - case class NumericToFloat[T](n: ExactNumeric[T]) extends UnOp[T, Float]("ToFloat", n.toFloat) + case class NumericToFloat[T](n: ExactNumeric[T]) extends UnOp[T, Float]("ToFloat") { + override def applySeq(x: T): Float = n.toFloat(x) + } - case class NumericToInt[T](n: ExactNumeric[T]) extends UnOp[T,Int]("ToInt", n.toInt) + case class NumericToInt[T](n: ExactNumeric[T]) extends UnOp[T,Int]("ToInt") { + override def applySeq(x: T): Int = n.toInt(x) + } - case class NumericToLong[T](n: ExactNumeric[T]) extends UnOp[T,Long]("ToLong", n.toLong) + case class NumericToLong[T](n: ExactNumeric[T]) extends UnOp[T,Long]("ToLong") { + override def applySeq(x: T): Long = n.toLong(x) + } - case class Abs[T: Elem](n: ExactNumeric[T]) extends UnOp[T, T]("Abs", n.abs) + case class Abs[T: Elem](n: ExactNumeric[T]) extends UnOp[T, T]("Abs") { + override def applySeq(x: T): T = n.abs(x) + } - case class IntegralDivide[T](i: ExactIntegral[T])(implicit elem: Elem[T]) extends DivOp[T]("/", i.quot, i) + case class IntegralDivide[T](i: ExactIntegral[T])(implicit elem: Elem[T]) extends DivOp[T]("/", i) { + override def applySeq(x: T, y: T): T = i.quot(x, y) + } - case class IntegralMod[T](i: ExactIntegral[T])(implicit elem: Elem[T]) extends DivOp[T]("%", i.rem, i) + case class IntegralMod[T](i: 
ExactIntegral[T])(implicit elem: Elem[T]) extends DivOp[T]("%", i) { + override def applySeq(x: T, y: T): T = i.rem(x, y) + } @inline final def isZero[T](x: T, n: ExactNumeric[T]) = x == n.zero @inline final def isOne[T](x: T, n: ExactNumeric[T]) = x == n.fromInt(1) diff --git a/core/src/main/scala/scalan/primitives/OrderingOps.scala b/core/src/main/scala/scalan/primitives/OrderingOps.scala index 19045be96a..cea48ea52e 100644 --- a/core/src/main/scala/scalan/primitives/OrderingOps.scala +++ b/core/src/main/scala/scalan/primitives/OrderingOps.scala @@ -16,17 +16,31 @@ trait OrderingOps extends Base { self: Scalan => def compare(rhs: Ref[T]): Ref[Int] = OrderingCompare(n).apply(lhs,rhs) } - case class OrderingLT[T](ord: ExactOrdering[T]) extends BinOp[T, Boolean]("<", ord.lt) + case class OrderingLT[T](ord: ExactOrdering[T]) extends BinOp[T, Boolean]("<") { + override def applySeq(x: T, y: T): Boolean = ord.lt(x, y) + } - case class OrderingLTEQ[T](ord: ExactOrdering[T]) extends BinOp[T, Boolean]("<=", ord.lteq) + case class OrderingLTEQ[T](ord: ExactOrdering[T]) extends BinOp[T, Boolean]("<=") { + override def applySeq(x: T, y: T): Boolean = ord.lteq(x, y) + } - case class OrderingGT[T](ord: ExactOrdering[T]) extends BinOp[T, Boolean](">", ord.gt) + case class OrderingGT[T](ord: ExactOrdering[T]) extends BinOp[T, Boolean](">") { + override def applySeq(x: T, y: T): Boolean = ord.gt(x, y) + } - case class OrderingGTEQ[T](ord: ExactOrdering[T]) extends BinOp[T, Boolean](">=", ord.gteq) + case class OrderingGTEQ[T](ord: ExactOrdering[T]) extends BinOp[T, Boolean](">=") { + override def applySeq(x: T, y: T): Boolean = ord.gteq(x, y) + } - case class OrderingMax[T: Elem](ord: ExactOrdering[T]) extends BinOp[T, T]("max", ord.max) + case class OrderingMax[T: Elem](ord: ExactOrdering[T]) extends BinOp[T, T]("max") { + override def applySeq(x: T, y: T): T = ord.max(x, y) + } - case class OrderingMin[T: Elem](ord: ExactOrdering[T]) extends BinOp[T, T]("min", ord.min) + case 
class OrderingMin[T: Elem](ord: ExactOrdering[T]) extends BinOp[T, T]("min") { + override def applySeq(x: T, y: T): T = ord.min(x, y) + } - case class OrderingCompare[T](ord: ExactOrdering[T]) extends BinOp[T, Int]("compare", ord.compare) + case class OrderingCompare[T](ord: ExactOrdering[T]) extends BinOp[T, Int]("compare") { + override def applySeq(x: T, y: T): Int = ord.compare(x, y) + } } \ No newline at end of file diff --git a/core/src/main/scala/scalan/primitives/Thunks.scala b/core/src/main/scala/scalan/primitives/Thunks.scala index 739285f648..ff5af4e504 100644 --- a/core/src/main/scala/scalan/primitives/Thunks.scala +++ b/core/src/main/scala/scalan/primitives/Thunks.scala @@ -73,7 +73,7 @@ trait Thunks extends Functions with GraphVizExport { self: Scalan => cachedElemByClass(eItem)(classOf[ThunkElem[T]]) implicit def extendThunkElement[T](elem: Elem[Thunk[T]]): ThunkElem[T] = elem.asInstanceOf[ThunkElem[T]] - class ThunkDef[A](val root: Ref[A], _scheduleIds: =>ScheduleIds) + class ThunkDef[A](val root: Ref[A], _scheduleIds: => ScheduleIds) extends AstGraph with Def[Thunk[A]] { implicit def eA: Elem[A] = root.elem @@ -110,12 +110,14 @@ trait Thunks extends Functions with GraphVizExport { self: Scalan => } def productArity: Int = 1 - override def boundVars = Nil - override lazy val freeVars: Seq[Sym] = if (schedule.isEmpty) Array(root) else super.freeVars + override def boundVars = EmptySeqOfSym + + val roots: Seq[Sym] = Array(root) + + override lazy val freeVars: Seq[Sym] = if (schedule.isEmpty) roots else super.freeVars override protected def getDeps: Array[Sym] = freeVars.toArray - val roots: Seq[Sym] = Array(root) override lazy val rootIds: DBuffer[Int] = super.rootIds override def isIdentity: Boolean = false } diff --git a/core/src/main/scala/scalan/primitives/UnBinOps.scala b/core/src/main/scala/scalan/primitives/UnBinOps.scala index ed690e7aa6..e7fa4249c9 100644 --- a/core/src/main/scala/scalan/primitives/UnBinOps.scala +++ 
b/core/src/main/scala/scalan/primitives/UnBinOps.scala @@ -4,17 +4,28 @@ import scalan.{Scalan, Base} trait UnBinOps extends Base { self: Scalan => - class UnOp[A, R](val opName: String, val applySeq: A => R)(implicit val eResult: Elem[R]) { + abstract class UnOp[A, R](val opName: String)(implicit val eResult: Elem[R]) { override def toString = opName + /** Called as part of graph interpretation to execute the given unary operation. + * @param x operation argument + * @return result of applying this operation to x + */ + def applySeq(x: A): R def apply(arg: Ref[A]) = applyUnOp(this, arg) def shouldPropagate(arg: A) = true } - class BinOp[A, R](val opName: String, val applySeq: (A, A) => R)(implicit val eResult: Elem[R]) { + abstract class BinOp[A, R](val opName: String)(implicit val eResult: Elem[R]) { override def toString = opName + /** Called as part of graph interpretation to execute the given binary operation. + * @param x operation argument + * @param y operation argument + * @return result of applying this operation to (x, y) + */ + def applySeq(x: A, y: A): R def apply(lhs: Ref[A], rhs: Ref[A]) = applyBinOp(this, lhs, rhs) def applyLazy(lhs: Ref[A], rhs: Ref[Thunk[A]]) = applyBinOpLazy(this, lhs, rhs) diff --git a/core/src/main/scala/scalan/primitives/UniversalOps.scala b/core/src/main/scala/scalan/primitives/UniversalOps.scala index 30aeb28dae..eaca5821ce 100644 --- a/core/src/main/scala/scalan/primitives/UniversalOps.scala +++ b/core/src/main/scala/scalan/primitives/UniversalOps.scala @@ -3,9 +3,13 @@ package scalan.primitives import scalan.{Base, Scalan} trait UniversalOps extends Base { scalan: Scalan => - case class HashCode[A]() extends UnOp[A, Int]("hashCode", _.hashCode) + case class HashCode[A]() extends UnOp[A, Int]("hashCode") { + override def applySeq(x: A): Int = x.hashCode + } - case class ToString[A]() extends UnOp[A, String]("toString", _.toString) + case class ToString[A]() extends UnOp[A, String]("toString") { + override def applySeq(x: 
A): String = x.toString + } /** Represents calculation of size in bytes of the given value. * The descriptor value.elem can be used to decompose value into components. diff --git a/core/src/main/scala/scalan/staged/AstGraphs.scala b/core/src/main/scala/scalan/staged/AstGraphs.scala index a9967bf100..e140b3d653 100644 --- a/core/src/main/scala/scalan/staged/AstGraphs.scala +++ b/core/src/main/scala/scalan/staged/AstGraphs.scala @@ -1,7 +1,7 @@ package scalan.staged import scala.collection._ -import scalan.Scalan +import scalan.{Scalan, Base} import scalan.compilation.GraphVizConfig import spire.syntax.all.cfor import debox.{Set => DSet, Buffer => DBuffer, Map => DMap} @@ -70,11 +70,16 @@ trait AstGraphs extends Transforming { self: Scalan => } } } - val res = new Array[Sym](resIds.length) - cfor(0)(_ < resIds.length, _ + 1) { i => - res(i) = getSym(resIds(i)) + val resLength = resIds.length + if (resLength == 0) + EmptySeqOfSym + else { + val res = new Array[Sym](resLength) + cfor(0)(_ < resLength, _ + 1) { i => + res(i) = getSym(resIds(i)) + } + res } - res } /** Schedule represents a body of compound definition - topologically ordered @@ -86,21 +91,31 @@ trait AstGraphs extends Transforming { self: Scalan => /** Sequence of node references forming a schedule. * @hotspot don't beautify the code */ lazy val schedule: Schedule = { - val len = scheduleIds.length - val res = new Array[Sym](len) - cfor(0)(_ < len, _ + 1) { i => - res(i) = getSym(scheduleIds(i)) + val ids = scheduleIds + val len = ids.length + if (len == 0) EmptySeqOfSym + else { + val res = new Array[Sym](len) + cfor(0)(_ < len, _ + 1) { i => + res(i) = getSym(ids(i)) + } + res } - res } /** Set of symbol ids in the schedule. Can be used to quickly recognize * symbols belonging to the body of this definition. 
*/ lazy val domain: DSet[Int] = { - val res = DSet.ofSize[Int](scheduleIds.length) - res ++= scheduleIds.toArray - res + val ids = scheduleIds + val len = ids.length + if (len == 0) { + Base.EmptyDSetOfInt + } else { + val res = DSet.ofSize[Int](len) + res ++= ids.toArray + res + } } /** Whether this graph represents identity function. */ @@ -132,7 +147,10 @@ trait AstGraphs extends Transforming { self: Scalan => lazy val flatSchedule: Schedule = { val flatBuf = DBuffer.ofSize[Sym](schedule.length) buildFlatSchedule(schedule, flatBuf) - flatBuf.toArray + if (flatBuf.length > 0) + flatBuf.toArray + else + EmptySeqOfSym } /** Build usage information induced by the given schedule. @@ -180,21 +198,15 @@ trait AstGraphs extends Transforming { self: Scalan => def globalUsagesOf(s: Sym): DBuffer[Sym] = allNodes.get(s.node.nodeId) match { case Some(node) => node.outSyms - case None => DBuffer.empty[Sym] + case None => EmptyDBufferOfSym } def hasManyUsagesGlobal(s: Sym): Boolean = globalUsagesOf(s).length > 1 - /** This empty buffer is returned every time the usages are requested for the node - * that is not in usageMap. - * WARNING! Since it is mutable, special care should be taken to not change this buffer. - * @hotspot used havily in scheduling */ - private val NoUsages = DBuffer.unsafe(new Array[Int](0)) - /** @hotspot for performance we return mutable structure, but it should never be changed. 
*/ def usagesOf(id: Int): DBuffer[Int] = { val node = usageMap.getOrElse(id, null) - if (node == null) return NoUsages + if (node == null) return Base.EmptyDBufferOfInt node.usages } diff --git a/sigma-impl/src/main/scala/special/sigma/TestBigInt.scala b/sigma-impl/src/main/scala/special/sigma/TestBigInt.scala index b23cea1e81..8b5f44170f 100644 --- a/sigma-impl/src/main/scala/special/sigma/TestBigInt.scala +++ b/sigma-impl/src/main/scala/special/sigma/TestBigInt.scala @@ -5,7 +5,7 @@ import java.math.BigInteger import scalan.util.Extensions.BigIntegerOps abstract class TestBigInt(private[sigma] val value: BigInteger) extends BigInt { - val dsl: TestSigmaDslBuilder = new TestSigmaDslBuilder + def dsl: TestSigmaDslBuilder override def toByte : Byte = value.byteValueExact() override def toShort: Short = value.shortValueExact() diff --git a/sigma-impl/src/main/scala/special/sigma/TestGroupElement.scala b/sigma-impl/src/main/scala/special/sigma/TestGroupElement.scala index bf57777773..9c21791fab 100644 --- a/sigma-impl/src/main/scala/special/sigma/TestGroupElement.scala +++ b/sigma-impl/src/main/scala/special/sigma/TestGroupElement.scala @@ -4,7 +4,7 @@ import org.bouncycastle.math.ec.ECPoint import special.collection.Coll abstract class TestGroupElement(private[sigma] val value: ECPoint) extends GroupElement { - val dsl: TestSigmaDslBuilder = new TestSigmaDslBuilder + def dsl: TestSigmaDslBuilder override def toString: String = s"GroupElement(${Extensions.showECPoint(value)})" diff --git a/sigmastate/src/main/scala/org/ergoplatform/ErgoAddress.scala b/sigmastate/src/main/scala/org/ergoplatform/ErgoAddress.scala index 6adc685cc6..1116b9fbf4 100644 --- a/sigmastate/src/main/scala/org/ergoplatform/ErgoAddress.scala +++ b/sigmastate/src/main/scala/org/ergoplatform/ErgoAddress.scala @@ -262,7 +262,7 @@ case class ErgoAddressEncoder(networkPrefix: NetworkPrefix) { import ErgoAddressEncoder._ /** This value is be used implicitly in the methods below. 
*/ - implicit private val ergoAddressEncoder: ErgoAddressEncoder = this + implicit private def ergoAddressEncoder: ErgoAddressEncoder = this /** Converts the given [[ErgoAddress]] to Base58 string. */ def toString(address: ErgoAddress): String = { diff --git a/sigmastate/src/main/scala/sigmastate/Values.scala b/sigmastate/src/main/scala/sigmastate/Values.scala index 268b3c3a95..6e02cdba22 100644 --- a/sigmastate/src/main/scala/sigmastate/Values.scala +++ b/sigmastate/src/main/scala/sigmastate/Values.scala @@ -37,6 +37,7 @@ import sigmastate.lang.SourceContext import special.collection.Coll import scala.collection.mutable +import scala.collection.mutable.ArrayBuffer object Values { @@ -676,8 +677,11 @@ object Values { // NOTE, the assert below should be commented before production release. // Is it there for debuging only, basically to catch call stacks where the fancy types may // occasionally be used. -// assert(items.isInstanceOf[mutable.WrappedArray[_]] || items.isInstanceOf[mutable.IndexedSeq[_]], -// s"Invalid types of items ${items.getClass}") + assert( + items.isInstanceOf[mutable.WrappedArray[_]] || + items.isInstanceOf[ArrayBuffer[_]] || + items.isInstanceOf[mutable.ArraySeq[_]], + s"Invalid types of items ${items.getClass}") private val isBooleanConstants = elementType == SBoolean && items.forall(_.isInstanceOf[Constant[_]]) override def companion = if (isBooleanConstants) ConcreteCollectionBooleanConstant @@ -821,7 +825,14 @@ object Values { */ case class FuncValue(args: IndexedSeq[(Int,SType)], body: Value[SType]) extends NotReadyValue[SFunc] { override def companion = FuncValue - lazy val tpe: SFunc = SFunc(args.toArray.map(_._2), body.tpe) + lazy val tpe: SFunc = { + val nArgs = args.length + val argTypes = new Array[SType](nArgs) + cfor(0)(_ < nArgs, _ + 1) { i => + argTypes(i) = args(i)._2 + } + SFunc(argTypes, body.tpe) + } /** This is not used as operation, but rather to form a program structure */ override def opType: SFunc = 
SFunc(mutable.WrappedArray.empty, tpe) } diff --git a/sigmastate/src/main/scala/sigmastate/eval/RuntimeCosting.scala b/sigmastate/src/main/scala/sigmastate/eval/RuntimeCosting.scala index 9cf0178843..feebb66c6d 100644 --- a/sigmastate/src/main/scala/sigmastate/eval/RuntimeCosting.scala +++ b/sigmastate/src/main/scala/sigmastate/eval/RuntimeCosting.scala @@ -1601,10 +1601,17 @@ trait RuntimeCosting extends CostingRules { IR: IRContext => RCCostedPrim(v, opCost(v, Array(lC.cost, rC.cost), costOf(node)), SizeBigInt) case OR(input) => input match { - case ConcreteCollection(items, tpe) => - val itemsC = items.map(item => eval(adaptSigmaBoolean(item))) - val res = sigmaDslBuilder.anyOf(colBuilder.fromItems(itemsC.map(_.value): _*)) - val costs = itemsC.map(_.cost) + case ConcreteCollection(items, _) => + val len = items.length + val values = new Array[Ref[Boolean]](len) + val costs = new Array[Ref[Int]](len) + cfor(0)(_ < len, _ + 1) { i => + val item = items(i) + val itemC = eval(adaptSigmaBoolean(item)) + values(i) = itemC.value + costs(i) = itemC.cost + } + val res = sigmaDslBuilder.anyOf(colBuilder.fromItems(values: _*)) val nOps = costs.length - 1 val cost = opCost(res, costs, perItemCostOf(node, nOps)) withConstantSize(res, cost) @@ -1617,10 +1624,17 @@ trait RuntimeCosting extends CostingRules { IR: IRContext => } case AND(input) => input match { - case ConcreteCollection(items, tpe) => - val itemsC = items.map(item => eval(adaptSigmaBoolean(item))) - val res = sigmaDslBuilder.allOf(colBuilder.fromItems(itemsC.map(_.value): _*)) - val costs = itemsC.map(_.cost) + case ConcreteCollection(items, _) => + val len = items.length + val values = new Array[Ref[Boolean]](len) + val costs = new Array[Ref[Int]](len) + cfor(0)(_ < len, _ + 1) { i => + val item = items(i) + val itemC = eval(adaptSigmaBoolean(item)) + values(i) = itemC.value + costs(i) = itemC.cost + } + val res = sigmaDslBuilder.allOf(colBuilder.fromItems(values: _*)) val nOps = costs.length - 1 val cost = 
opCost(res, costs, perItemCostOf(node, nOps)) withConstantSize(res, cost) @@ -1633,10 +1647,17 @@ trait RuntimeCosting extends CostingRules { IR: IRContext => } case XorOf(input) => input match { - case ConcreteCollection(items, tpe) => - val itemsC = items.map(item => eval(item)) - val res = sigmaDslBuilder.xorOf(colBuilder.fromItems(itemsC.map(_.value): _*)) - val costs = itemsC.map(_.cost) + case ConcreteCollection(items, _) => + val len = items.length + val values = new Array[Ref[Boolean]](len) + val costs = new Array[Ref[Int]](len) + cfor(0)(_ < len, _ + 1) { i => + val item = items(i) + val itemC = eval(adaptSigmaBoolean(item)) + values(i) = itemC.value + costs(i) = itemC.cost + } + val res = sigmaDslBuilder.xorOf(colBuilder.fromItems(values: _*)) val nOps = costs.length - 1 val cost = opCost(res, costs, perItemCostOf(node, nOps)) withConstantSize(res, cost) @@ -1680,16 +1701,30 @@ trait RuntimeCosting extends CostingRules { IR: IRContext => } case SigmaAnd(items) => - val itemsC = items.map(eval) - val res = sigmaDslBuilder.allZK(colBuilder.fromItems(itemsC.map(s => asRep[SigmaProp](s.value)): _*)) - val costs = itemsC.map(_.cost) + val len = items.length + val values = new Array[Ref[SigmaProp]](len) + val costs = new Array[Ref[Int]](len) + cfor(0)(_ < len, _ + 1) { i => + val item = items(i) + val itemC = eval(item) + values(i) = asRep[SigmaProp](itemC.value) + costs(i) = itemC.cost + } + val res = sigmaDslBuilder.allZK(colBuilder.fromItems(values: _*)) val cost = opCost(res, costs, perItemCostOf(node, costs.length)) RCCostedPrim(res, cost, SizeSigmaProposition) case SigmaOr(items) => - val itemsC = items.map(eval) - val res = sigmaDslBuilder.anyZK(colBuilder.fromItems(itemsC.map(s => asRep[SigmaProp](s.value)): _*)) - val costs = itemsC.map(_.cost) + val len = items.length + val values = new Array[Ref[SigmaProp]](len) + val costs = new Array[Ref[Int]](len) + cfor(0)(_ < len, _ + 1) { i => + val item = items(i) + val itemC = eval(item) + values(i) = 
asRep[SigmaProp](itemC.value) + costs(i) = itemC.cost + } + val res = sigmaDslBuilder.anyZK(colBuilder.fromItems(values: _*)) val cost = opCost(res, costs, perItemCostOf(node, costs.length)) RCCostedPrim(res, cost, SizeSigmaProposition) @@ -1802,8 +1837,23 @@ trait RuntimeCosting extends CostingRules { IR: IRContext => // fallback rule for MethodCall, should be the last case in the list case Terms.MethodCall(obj, method, args, typeSubst) if method.objType.coster.isDefined => val objC = eval(obj) - val argsC = args.map(eval) - val elems = typeSubst.values.toSeq.map(tpe => liftElem(stypeToElem(tpe).asInstanceOf[Elem[Any]])) + val argsC = { + val len = args.length + val res = new Array[RCosted[SType#WrappedType]](len) + cfor(0)(_ < len, _ + 1) { i => + res(i) = eval(args(i)) + } + res + } + val elems = { + val ts = typeSubst.values.toArray + val len = ts.length + val res = new Array[Sym](len) + cfor(0)(_ < len, _ + 1) { i => + res(i) = liftElem(stypeToElem(ts(i)).asInstanceOf[Elem[Any]]) + } + res + } method.objType.coster.get(IR)(objC, method, argsC, elems) case _ => diff --git a/sigmastate/src/main/scala/sigmastate/trees.scala b/sigmastate/src/main/scala/sigmastate/trees.scala index 159d96030f..f78f6cf44b 100644 --- a/sigmastate/src/main/scala/sigmastate/trees.scala +++ b/sigmastate/src/main/scala/sigmastate/trees.scala @@ -157,11 +157,11 @@ case class CreateAvlTree(operationFlags: ByteValue, valueLengthOpt: Value[SIntOption]) extends AvlTreeValue { override def companion = CreateAvlTree override def tpe = SAvlTree - override def opType = CreateAvlTree.opType + override def opType = CreateAvlTree.OpType } object CreateAvlTree extends ValueCompanion { override def opCode: OpCode = OpCodes.AvlTreeCode - val opType = SFunc(IndexedSeq(SByte, SByteArray, SInt, SIntOption), SAvlTree) + val OpType = SFunc(Array(SByte, SByteArray, SInt, SIntOption), SAvlTree) } /** ErgoTree operation to create a new SigmaProp value representing public key diff --git 
a/sigmastate/src/main/scala/sigmastate/utxo/transformers.scala b/sigmastate/src/main/scala/sigmastate/utxo/transformers.scala index bed8d10335..1384b4b89f 100644 --- a/sigmastate/src/main/scala/sigmastate/utxo/transformers.scala +++ b/sigmastate/src/main/scala/sigmastate/utxo/transformers.scala @@ -231,10 +231,14 @@ case class ExtractRegisterAs[V <: SType]( input: Value[SBox.type], override val tpe: SOption[V]) extends Extract[SOption[V]] with NotReadyValue[SOption[V]] { override def companion = ExtractRegisterAs - override val opType = SFunc(Array(SBox, SByte), tpe) + override val opType = SFunc(ExtractRegisterAs.BoxAndByte, tpe) } object ExtractRegisterAs extends ValueCompanion { override def opCode: OpCode = OpCodes.ExtractRegisterAs + + //@hotspot: avoids 10^6 allocations + private val BoxAndByte: IndexedSeq[SType] = Array(SBox, SByte) + def apply[V <: SType](input: Value[SBox.type], registerId: RegisterId)(implicit tpe: V): ExtractRegisterAs[V] = ExtractRegisterAs(input, registerId, SOption(tpe)) diff --git a/sigmastate/src/test/scala/special/sigma/SigmaDslTesting.scala b/sigmastate/src/test/scala/special/sigma/SigmaDslTesting.scala index 80e9eb0c8c..a70d462a0e 100644 --- a/sigmastate/src/test/scala/special/sigma/SigmaDslTesting.scala +++ b/sigmastate/src/test/scala/special/sigma/SigmaDslTesting.scala @@ -305,7 +305,7 @@ class SigmaDslTesting extends PropSpec // Add additional oparations which are not yet implemented in ErgoScript compiler val multisig = sigmastate.AtLeast( IntConstant(2), - Seq( + Array( pkAlice, DeserializeRegister(ErgoBox.R5, SSigmaProp), // deserialize pkBob DeserializeContext(2, SSigmaProp))) // deserialize pkCarol From 5bb6c02c2011b3f13427284e05e8f8a8a2d8ca7f Mon Sep 17 00:00:00 2001 From: Alexander Slesarenko Date: Mon, 5 Oct 2020 18:19:55 +0300 Subject: [PATCH 07/19] memory bound IRContext: allow any IndexedSeq types --- sigmastate/src/main/scala/sigmastate/Values.scala | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) 
diff --git a/sigmastate/src/main/scala/sigmastate/Values.scala b/sigmastate/src/main/scala/sigmastate/Values.scala index 6e02cdba22..dfac1d9406 100644 --- a/sigmastate/src/main/scala/sigmastate/Values.scala +++ b/sigmastate/src/main/scala/sigmastate/Values.scala @@ -677,11 +677,12 @@ object Values { // NOTE, the assert below should be commented before production release. // Is it there for debuging only, basically to catch call stacks where the fancy types may // occasionally be used. - assert( - items.isInstanceOf[mutable.WrappedArray[_]] || - items.isInstanceOf[ArrayBuffer[_]] || - items.isInstanceOf[mutable.ArraySeq[_]], - s"Invalid types of items ${items.getClass}") +// assert( +// items.isInstanceOf[mutable.WrappedArray[_]] || +// items.isInstanceOf[ArrayBuffer[_]] || +// items.isInstanceOf[mutable.ArraySeq[_]], +// s"Invalid types of items ${items.getClass}") + private val isBooleanConstants = elementType == SBoolean && items.forall(_.isInstanceOf[Constant[_]]) override def companion = if (isBooleanConstants) ConcreteCollectionBooleanConstant From 8fb6dd5af2dff2ac50577711faa3da2efeee8f5e Mon Sep 17 00:00:00 2001 From: Alexander Slesarenko Date: Wed, 7 Oct 2020 15:48:34 +0300 Subject: [PATCH 08/19] memory bound IRContext: optimize serializers --- .../src/main/scala/sigmastate/Values.scala | 4 ++++ .../scala/sigmastate/eval/TreeBuilding.scala | 2 +- .../scala/sigmastate/lang/SigmaTyper.scala | 12 ++++++------ ...teCollectionBooleanConstantSerializer.scala | 13 +++++++++---- .../ConcreteCollectionSerializer.scala | 15 ++++++++++----- .../serialization/FuncValueSerializer.scala | 11 +++++------ .../serialization/MethodCallSerializer.scala | 15 +++++++++++---- .../serialization/PropertyCallSerializer.scala | 6 +++--- .../serialization/TupleSerializer.scala | 6 +++++- .../serialization/ValDefSerializer.scala | 2 +- .../src/main/scala/sigmastate/types.scala | 4 ++++ .../sigmastate/utils/SigmaByteReader.scala | 16 ++++++++++------ 
.../sigmastate/lang/SigmaCompilerTest.scala | 4 ++-- .../scala/sigmastate/lang/SigmaTyperTest.scala | 18 +++++++++--------- 14 files changed, 80 insertions(+), 48 deletions(-) diff --git a/sigmastate/src/main/scala/sigmastate/Values.scala b/sigmastate/src/main/scala/sigmastate/Values.scala index dfac1d9406..3c517c089e 100644 --- a/sigmastate/src/main/scala/sigmastate/Values.scala +++ b/sigmastate/src/main/scala/sigmastate/Values.scala @@ -115,6 +115,10 @@ object Values { } def notSupportedError(v: SValue, opName: String) = throw new IllegalArgumentException(s"Method $opName is not supported for node $v") + + /** Immutable values used in many places which allows to avoid allocations. */ + val EmptyArray = Array.empty[SValue] + val EmptySeq: IndexedSeq[SValue] = EmptyArray } trait ValueCompanion extends SigmaNodeCompanion { diff --git a/sigmastate/src/main/scala/sigmastate/eval/TreeBuilding.scala b/sigmastate/src/main/scala/sigmastate/eval/TreeBuilding.scala index dc3fc73cc3..0e8253391a 100644 --- a/sigmastate/src/main/scala/sigmastate/eval/TreeBuilding.scala +++ b/sigmastate/src/main/scala/sigmastate/eval/TreeBuilding.scala @@ -254,7 +254,7 @@ trait TreeBuilding extends RuntimeCosting { IR: IRContext => case (mth @ SCollection.ZipMethod, Seq(coll)) => val typeSubst = Map(SCollection.tOV -> coll.asCollection[SType].tpe.elemType) typeSubst - case (mth, _) => SigmaTyper.emptySubst + case (mth, _) => SigmaTyper.EmptySubst } val specMethod = method.withConcreteTypes(typeSubst + (SCollection.tIV -> colTpe.elemType)) builder.mkMethodCall(col, specMethod, args.toIndexedSeq, Map()) diff --git a/sigmastate/src/main/scala/sigmastate/lang/SigmaTyper.scala b/sigmastate/src/main/scala/sigmastate/lang/SigmaTyper.scala index e1b6c1df03..79f24ab509 100644 --- a/sigmastate/src/main/scala/sigmastate/lang/SigmaTyper.scala +++ b/sigmastate/src/main/scala/sigmastate/lang/SigmaTyper.scala @@ -36,9 +36,9 @@ class SigmaTyper(val builder: SigmaBuilder, predefFuncRegistry: PredefinedFuncRe 
val global = Global.withPropagatedSrcCtx(srcCtx) val node = for { pf <- method.irInfo.irBuilder - res <- pf.lift((builder, global, method, args, emptySubst)) + res <- pf.lift((builder, global, method, args, EmptySubst)) } yield res - node.getOrElse(mkMethodCall(global, method, args, emptySubst).withPropagatedSrcCtx(srcCtx)) + node.getOrElse(mkMethodCall(global, method, args, EmptySubst).withPropagatedSrcCtx(srcCtx)) } /** * Rewrite tree to typed tree. Checks constituent names and types. Uses @@ -286,7 +286,7 @@ class SigmaTyper(val builder: SigmaBuilder, predefFuncRegistry: PredefinedFuncRe case None => error(s"Invalid argument type of method call $mc : expected ${sfunc.tDom}; actual: $actualTypes", mc.sourceContext) } - case _ => emptySubst + case _ => EmptySubst } method.irInfo.irBuilder.flatMap(_.lift(builder, newObj, method, newArgs, typeSubst)) .getOrElse(mkMethodCall(newObj, method, newArgs, typeSubst)) @@ -614,7 +614,7 @@ class SigmaTyper(val builder: SigmaBuilder, predefFuncRegistry: PredefinedFuncRe object SigmaTyper { type STypeSubst = Map[STypeVar, SType] - val emptySubst = Map.empty[STypeVar, SType] + val EmptySubst = Map.empty[STypeVar, SType] /** Performs pairwise type unification making sure each type variable is equally * substituted in all items. 
*/ @@ -623,7 +623,7 @@ object SigmaTyper { val itemsUni = (items1, items2).zipped.map((t1, t2) => unifyTypes(t1,t2)) if (itemsUni.forall(_.isDefined)) { // merge substitutions making sure the same id is equally substituted in all items - val merged = itemsUni.foldLeft(emptySubst)((acc, subst) => { + val merged = itemsUni.foldLeft(EmptySubst)((acc, subst) => { var res = acc for ((id, t) <- subst.get) { if (res.contains(id) && res(id) != t) return None @@ -636,7 +636,7 @@ object SigmaTyper { None } - private val unifiedWithoutSubst = Some(emptySubst) + private val unifiedWithoutSubst = Some(EmptySubst) /** Finds a substitution `subst` of type variables such that unifyTypes(applySubst(t1, subst), t2) shouldBe Some(emptySubst) */ def unifyTypes(t1: SType, t2: SType): Option[STypeSubst] = (t1, t2) match { diff --git a/sigmastate/src/main/scala/sigmastate/serialization/ConcreteCollectionBooleanConstantSerializer.scala b/sigmastate/src/main/scala/sigmastate/serialization/ConcreteCollectionBooleanConstantSerializer.scala index 8dd4b22be3..0aca68acd5 100644 --- a/sigmastate/src/main/scala/sigmastate/serialization/ConcreteCollectionBooleanConstantSerializer.scala +++ b/sigmastate/src/main/scala/sigmastate/serialization/ConcreteCollectionBooleanConstantSerializer.scala @@ -30,10 +30,15 @@ case class ConcreteCollectionBooleanConstantSerializer(cons: (IndexedSeq[Value[S override def parse(r: SigmaByteReader): Value[SCollection[SBoolean.type]] = { val size = r.getUShort() // READ val bits = r.getBits(size) // READ - val items = new Array[BoolValue](size) - cfor(0)(_ < size, _ + 1) { i => - items(i) = BooleanConstant.fromBoolean(bits(i)) + if (size == 0) { + // reusing pre-allocated immutable instances + cons(Value.EmptySeq.asInstanceOf[IndexedSeq[Value[SBoolean.type]]], SBoolean) + } else { + val items = new Array[BoolValue](size) + cfor(0)(_ < size, _ + 1) { i => + items(i) = BooleanConstant.fromBoolean(bits(i)) + } + cons(items, SBoolean) } - cons(items, SBoolean) } } diff 
--git a/sigmastate/src/main/scala/sigmastate/serialization/ConcreteCollectionSerializer.scala b/sigmastate/src/main/scala/sigmastate/serialization/ConcreteCollectionSerializer.scala index d5e41465a1..9a54904803 100644 --- a/sigmastate/src/main/scala/sigmastate/serialization/ConcreteCollectionSerializer.scala +++ b/sigmastate/src/main/scala/sigmastate/serialization/ConcreteCollectionSerializer.scala @@ -25,11 +25,16 @@ case class ConcreteCollectionSerializer(cons: (IndexedSeq[Value[SType]], SType) override def parse(r: SigmaByteReader): Value[SCollection[SType]] = { val size = r.getUShort() // READ val tItem = r.getType() // READ - val values = new Array[SValue](size) - cfor(0)(_ < size, _ + 1) { i => - values(i) = r.getValue() // READ + if (size == 0) { + // reusing pre-allocated immutable instances + cons(Value.EmptySeq, tItem) + } else { + val values = new Array[SValue](size) + cfor(0)(_ < size, _ + 1) { i => + values(i) = r.getValue() // READ + } + assert(values.forall(_.tpe == tItem), s"Invalid type of collection value in $values") + cons(values, tItem) } - assert(values.forall(_.tpe == tItem), s"Invalid type of collection value in $values") - cons(values, tItem) } } diff --git a/sigmastate/src/main/scala/sigmastate/serialization/FuncValueSerializer.scala b/sigmastate/src/main/scala/sigmastate/serialization/FuncValueSerializer.scala index a5e41fdbb2..5448963a65 100644 --- a/sigmastate/src/main/scala/sigmastate/serialization/FuncValueSerializer.scala +++ b/sigmastate/src/main/scala/sigmastate/serialization/FuncValueSerializer.scala @@ -6,8 +6,7 @@ import scorex.util.Extensions._ import sigmastate.utils.{SigmaByteReader, SigmaByteWriter} import ValueSerializer._ import sigmastate.utils.SigmaByteWriter.{DataInfo, U, Vlq} - -import scala.collection.mutable +import spire.syntax.all.cfor case class FuncValueSerializer(cons: (IndexedSeq[(Int, SType)], Value[SType]) => Value[SType]) extends ValueSerializer[FuncValue] { @@ -28,14 +27,14 @@ case class 
FuncValueSerializer(cons: (IndexedSeq[(Int, SType)], Value[SType]) => override def parse(r: SigmaByteReader): Value[SType] = { val argsSize = r.getUInt().toIntExact - val argsBuilder = mutable.ArrayBuilder.make[(Int, SType)]() - for (_ <- 0 until argsSize) { + val args = new Array[(Int, SType)](argsSize) + cfor(0)(_ < argsSize, _ + 1) { i => val id = r.getUInt().toInt val tpe = r.getType() r.valDefTypeStore(id) = tpe - argsBuilder += ((id, tpe)) + args(i) = (id, tpe) } val body = r.getValue() - cons(argsBuilder.result(), body) + cons(args, body) } } diff --git a/sigmastate/src/main/scala/sigmastate/serialization/MethodCallSerializer.scala b/sigmastate/src/main/scala/sigmastate/serialization/MethodCallSerializer.scala index 17dbc964f6..99f4dc29e8 100644 --- a/sigmastate/src/main/scala/sigmastate/serialization/MethodCallSerializer.scala +++ b/sigmastate/src/main/scala/sigmastate/serialization/MethodCallSerializer.scala @@ -49,10 +49,17 @@ case class MethodCallSerializer(cons: (Value[SType], SMethod, IndexedSeq[Value[S val complexity = ComplexityTable.MethodCallComplexity.getOrElse((typeId, methodId), ComplexityTable.MinimalComplexity) r.addComplexity(complexity) val nArgs = args.length - val types = new Array[SType](nArgs) - cfor(0)(_ < nArgs, _ + 1) { i => - types(i) = args(i).tpe - } + + val types: Seq[SType] = + if (nArgs == 0) SType.EmptySeq + else { + val types = new Array[SType](nArgs) + cfor(0)(_ < nArgs, _ + 1) { i => + types(i) = args(i).tpe + } + types + } + val specMethod = method.specializeFor(obj.tpe, types) cons(obj, specMethod, args, Map.empty) } diff --git a/sigmastate/src/main/scala/sigmastate/serialization/PropertyCallSerializer.scala b/sigmastate/src/main/scala/sigmastate/serialization/PropertyCallSerializer.scala index f382c203e1..3a2ae84002 100644 --- a/sigmastate/src/main/scala/sigmastate/serialization/PropertyCallSerializer.scala +++ b/sigmastate/src/main/scala/sigmastate/serialization/PropertyCallSerializer.scala @@ -2,6 +2,7 @@ package 
sigmastate.serialization import sigmastate.Values._ import sigmastate._ +import sigmastate.lang.SigmaTyper import sigmastate.lang.SigmaTyper.STypeSubst import sigmastate.lang.Terms.{PropertyCall, MethodCall} import sigmastate.utils.SigmaByteWriter.DataInfo @@ -26,11 +27,10 @@ case class PropertyCallSerializer(cons: (Value[SType], SMethod, IndexedSeq[Value val typeId = r.getByte() val methodId = r.getByte() val obj = r.getValue() - val args = IndexedSeq() val method = SMethod.fromIds(typeId, methodId) val complexity = ComplexityTable.MethodCallComplexity.getOrElse((typeId, methodId), ComplexityTable.MinimalComplexity) r.addComplexity(complexity) - val specMethod = method.specializeFor(obj.tpe, args) - cons(obj, specMethod, args, Map()) + val specMethod = method.specializeFor(obj.tpe, SType.EmptySeq) + cons(obj, specMethod, Value.EmptySeq, SigmaTyper.EmptySubst) } } diff --git a/sigmastate/src/main/scala/sigmastate/serialization/TupleSerializer.scala b/sigmastate/src/main/scala/sigmastate/serialization/TupleSerializer.scala index 0578f4dc66..3ca0f3f15a 100644 --- a/sigmastate/src/main/scala/sigmastate/serialization/TupleSerializer.scala +++ b/sigmastate/src/main/scala/sigmastate/serialization/TupleSerializer.scala @@ -5,6 +5,7 @@ import sigmastate.Values._ import sigmastate.utils.{SigmaByteReader, SigmaByteWriter} import ValueSerializer._ import sigmastate.utils.SigmaByteWriter.{DataInfo, U} +import spire.syntax.all.cfor case class TupleSerializer(cons: Seq[Value[SType]] => Value[SType]) extends ValueSerializer[Tuple] { @@ -22,7 +23,10 @@ case class TupleSerializer(cons: Seq[Value[SType]] => Value[SType]) override def parse(r: SigmaByteReader): Value[SType] = { val size = r.getByte() - val values = (1 to size).map(_ => r.getValue()) + val values = new Array[SValue](size) + cfor(0)(_ < size, _ + 1) { i => + values(i) = r.getValue() + } cons(values) } diff --git a/sigmastate/src/main/scala/sigmastate/serialization/ValDefSerializer.scala 
b/sigmastate/src/main/scala/sigmastate/serialization/ValDefSerializer.scala index 8d83ea0b21..e485df7580 100644 --- a/sigmastate/src/main/scala/sigmastate/serialization/ValDefSerializer.scala +++ b/sigmastate/src/main/scala/sigmastate/serialization/ValDefSerializer.scala @@ -39,7 +39,7 @@ case class ValDefSerializer(override val opDesc: ValueCompanion) extends ValueSe } inputs case ValDefCode => - Nil + STypeVar.EmptySeq } val rhs = r.getValue() r.valDefTypeStore(id) = rhs.tpe diff --git a/sigmastate/src/main/scala/sigmastate/types.scala b/sigmastate/src/main/scala/sigmastate/types.scala index 2cec907904..fb020c841d 100644 --- a/sigmastate/src/main/scala/sigmastate/types.scala +++ b/sigmastate/src/main/scala/sigmastate/types.scala @@ -151,6 +151,8 @@ object SType { val IndexedSeqOfT1: IndexedSeq[SType] = Array(SType.tT) val IndexedSeqOfT2: IndexedSeq[SType] = Array(SType.tT, SType.tT) + val EmptyArray = Array.empty[SType] + val EmptySeq: IndexedSeq[SType] = EmptyArray /** All pre-defined types should be listed here. Note, NoType is not listed. * Should be in sync with sigmastate.lang.Types.predefTypes. 
*/ @@ -1336,6 +1338,8 @@ case class STypeVar(name: String) extends SType { object STypeVar { val TypeCode: TypeCode = 103: Byte implicit def liftString(n: String): STypeVar = STypeVar(n) + val EmptyArray = Array.empty[STypeVar] + val EmptySeq: IndexedSeq[STypeVar] = EmptyArray } case object SBox extends SProduct with SPredefType with SMonoType { diff --git a/sigmastate/src/main/scala/sigmastate/utils/SigmaByteReader.scala b/sigmastate/src/main/scala/sigmastate/utils/SigmaByteReader.scala index 1d679c3b53..47ee83c405 100644 --- a/sigmastate/src/main/scala/sigmastate/utils/SigmaByteReader.scala +++ b/sigmastate/src/main/scala/sigmastate/utils/SigmaByteReader.scala @@ -2,10 +2,11 @@ package sigmastate.utils import scorex.util.serialization.Reader import sigmastate.SType -import sigmastate.Values.SValue -import sigmastate.lang.exceptions.{DeserializeCallDepthExceeded, InputSizeLimitExceeded} +import sigmastate.Values.{SValue, Value} +import sigmastate.lang.exceptions.{InputSizeLimitExceeded, DeserializeCallDepthExceeded} import sigmastate.serialization._ import scorex.util.Extensions._ +import spire.syntax.all.cfor class SigmaByteReader(val r: Reader, var constantStore: ConstantStore, @@ -119,11 +120,14 @@ class SigmaByteReader(val r: Reader, @inline def getValues(): IndexedSeq[SValue] = { val size = getUInt().toIntExact - val xs = new Array[SValue](size) - for (i <- 0 until size) { - xs(i) = getValue() + if (size == 0) Value.EmptySeq + else { + val xs = new Array[SValue](size) + cfor(0)(_ < size, _ + 1) { i => + xs(i) = getValue() + } + xs } - xs } private var positionLmt: Int = r.position + r.remaining diff --git a/sigmastate/src/test/scala/sigmastate/lang/SigmaCompilerTest.scala b/sigmastate/src/test/scala/sigmastate/lang/SigmaCompilerTest.scala index a2f005ba0f..786c31eeef 100644 --- a/sigmastate/src/test/scala/sigmastate/lang/SigmaCompilerTest.scala +++ b/sigmastate/src/test/scala/sigmastate/lang/SigmaCompilerTest.scala @@ -87,8 +87,8 @@ class SigmaCompilerTest 
extends SigmaTestingCommons with LangTests with ObjectGe } property("global methods") { - comp(env, "{ groupGenerator }") shouldBe MethodCall(Global, SGlobal.groupGeneratorMethod, IndexedSeq(), SigmaTyper.emptySubst) - comp(env, "{ Global.groupGenerator }") shouldBe MethodCall(Global, SGlobal.groupGeneratorMethod, IndexedSeq(), SigmaTyper.emptySubst) + comp(env, "{ groupGenerator }") shouldBe MethodCall(Global, SGlobal.groupGeneratorMethod, IndexedSeq(), SigmaTyper.EmptySubst) + comp(env, "{ Global.groupGenerator }") shouldBe MethodCall(Global, SGlobal.groupGeneratorMethod, IndexedSeq(), SigmaTyper.EmptySubst) comp(env, "{ Global.xor(arr1, arr2) }") shouldBe Xor(ByteArrayConstant(arr1), ByteArrayConstant(arr2)) comp(env, "{ xor(arr1, arr2) }") shouldBe Xor(ByteArrayConstant(arr1), ByteArrayConstant(arr2)) } diff --git a/sigmastate/src/test/scala/sigmastate/lang/SigmaTyperTest.scala b/sigmastate/src/test/scala/sigmastate/lang/SigmaTyperTest.scala index 00286d5f25..4926824bb1 100644 --- a/sigmastate/src/test/scala/sigmastate/lang/SigmaTyperTest.scala +++ b/sigmastate/src/test/scala/sigmastate/lang/SigmaTyperTest.scala @@ -312,12 +312,12 @@ class SigmaTyperTest extends PropSpec with PropertyChecks with Matchers with Lan property("compute unifying type substitution: prim types") { import SigmaTyper._ forAll { t: SPredefType => - unifyTypes(t, t) shouldBe Some(emptySubst) - unifyTypes(SAny, t) shouldBe Some(emptySubst) - unifyTypes(SAny, SCollection(t)) shouldBe Some(emptySubst) - unifyTypes(SCollection(SAny), SCollection(t)) shouldBe Some(emptySubst) - unifyTypes(SCollection(SAny), STuple(t, t, t)) shouldBe Some(emptySubst) - unifyTypes(SCollection(SAny), STuple(t, STuple(t, t))) shouldBe Some(emptySubst) + unifyTypes(t, t) shouldBe Some(EmptySubst) + unifyTypes(SAny, t) shouldBe Some(EmptySubst) + unifyTypes(SAny, SCollection(t)) shouldBe Some(EmptySubst) + unifyTypes(SCollection(SAny), SCollection(t)) shouldBe Some(EmptySubst) + unifyTypes(SCollection(SAny), 
STuple(t, t, t)) shouldBe Some(EmptySubst) + unifyTypes(SCollection(SAny), STuple(t, STuple(t, t))) shouldBe Some(EmptySubst) } } @@ -327,11 +327,11 @@ class SigmaTyperTest extends PropSpec with PropertyChecks with Matchers with Lan unifyTypes(t1, t2) shouldBe exp exp match { case Some(subst) => - unifyTypes(applySubst(t1, subst), t2) shouldBe Some(emptySubst) + unifyTypes(applySubst(t1, subst), t2) shouldBe Some(EmptySubst) case None => } } - def check(s1: String, s2: String, exp: Option[STypeSubst] = Some(emptySubst)): Unit = { + def check(s1: String, s2: String, exp: Option[STypeSubst] = Some(EmptySubst)): Unit = { val t1 = ty(s1); val t2 = ty(s2) checkTypes(t1, t2, exp) } @@ -408,7 +408,7 @@ class SigmaTyperTest extends PropSpec with PropertyChecks with Matchers with Lan "((Int,Int), Coll[Boolean] => Coll[(Coll[C], Boolean)]) => Int", ("A", SInt), ("B", SBoolean)) - unifyTypes(SBoolean, SSigmaProp) shouldBe Some(emptySubst) + unifyTypes(SBoolean, SSigmaProp) shouldBe Some(EmptySubst) unifyTypes(SSigmaProp, SBoolean) shouldBe None check("(Int, Boolean)", "(Int, SigmaProp)") check("(Int, Boolean, Boolean)", "(Int, SigmaProp, SigmaProp)") From b1d36394fe169c1859317f60d2de262b37f8f8d3 Mon Sep 17 00:00:00 2001 From: Alexander Slesarenko Date: Mon, 12 Oct 2020 11:46:35 +0300 Subject: [PATCH 09/19] memory bound IRContext: optimize serializers (part 2) --- ...ConcreteCollectionBooleanConstantSerializer.scala | 8 +++++--- .../serialization/ConcreteCollectionSerializer.scala | 12 +++++++----- .../sigmastate/serialization/TupleSerializer.scala | 2 +- 3 files changed, 13 insertions(+), 9 deletions(-) diff --git a/sigmastate/src/main/scala/sigmastate/serialization/ConcreteCollectionBooleanConstantSerializer.scala b/sigmastate/src/main/scala/sigmastate/serialization/ConcreteCollectionBooleanConstantSerializer.scala index 0aca68acd5..500056251a 100644 --- a/sigmastate/src/main/scala/sigmastate/serialization/ConcreteCollectionBooleanConstantSerializer.scala +++ 
b/sigmastate/src/main/scala/sigmastate/serialization/ConcreteCollectionBooleanConstantSerializer.scala @@ -30,15 +30,17 @@ case class ConcreteCollectionBooleanConstantSerializer(cons: (IndexedSeq[Value[S override def parse(r: SigmaByteReader): Value[SCollection[SBoolean.type]] = { val size = r.getUShort() // READ val bits = r.getBits(size) // READ - if (size == 0) { + val items: IndexedSeq[Value[SBoolean.type]] = if (size == 0) { // reusing pre-allocated immutable instances - cons(Value.EmptySeq.asInstanceOf[IndexedSeq[Value[SBoolean.type]]], SBoolean) + Value.EmptySeq.asInstanceOf[IndexedSeq[Value[SBoolean.type]]] } else { val items = new Array[BoolValue](size) cfor(0)(_ < size, _ + 1) { i => items(i) = BooleanConstant.fromBoolean(bits(i)) } - cons(items, SBoolean) + items } + cons(items, SBoolean) } + } diff --git a/sigmastate/src/main/scala/sigmastate/serialization/ConcreteCollectionSerializer.scala b/sigmastate/src/main/scala/sigmastate/serialization/ConcreteCollectionSerializer.scala index 9a54904803..82c3e24f22 100644 --- a/sigmastate/src/main/scala/sigmastate/serialization/ConcreteCollectionSerializer.scala +++ b/sigmastate/src/main/scala/sigmastate/serialization/ConcreteCollectionSerializer.scala @@ -25,16 +25,18 @@ case class ConcreteCollectionSerializer(cons: (IndexedSeq[Value[SType]], SType) override def parse(r: SigmaByteReader): Value[SCollection[SType]] = { val size = r.getUShort() // READ val tItem = r.getType() // READ - if (size == 0) { + val values: IndexedSeq[Value[SType]] = if (size == 0) { // reusing pre-allocated immutable instances - cons(Value.EmptySeq, tItem) + Value.EmptySeq } else { val values = new Array[SValue](size) cfor(0)(_ < size, _ + 1) { i => - values(i) = r.getValue() // READ + val v = r.getValue() // READ + values(i) = v + assert(v.tpe == tItem, s"Invalid type of collection value in $values") } - assert(values.forall(_.tpe == tItem), s"Invalid type of collection value in $values") - cons(values, tItem) + values } + cons(values, 
tItem) } } diff --git a/sigmastate/src/main/scala/sigmastate/serialization/TupleSerializer.scala b/sigmastate/src/main/scala/sigmastate/serialization/TupleSerializer.scala index 3ca0f3f15a..f579803f62 100644 --- a/sigmastate/src/main/scala/sigmastate/serialization/TupleSerializer.scala +++ b/sigmastate/src/main/scala/sigmastate/serialization/TupleSerializer.scala @@ -23,7 +23,7 @@ case class TupleSerializer(cons: Seq[Value[SType]] => Value[SType]) override def parse(r: SigmaByteReader): Value[SType] = { val size = r.getByte() - val values = new Array[SValue](size) + val values = new Array[SValue](size) // assume size > 0 so always create a new array cfor(0)(_ < size, _ + 1) { i => values(i) = r.getValue() } From fda477c9de568e6a2002612d4c1e65d395b54c98 Mon Sep 17 00:00:00 2001 From: Alexander Slesarenko Date: Wed, 7 Oct 2020 21:26:57 +0300 Subject: [PATCH 10/19] memory bound IRContext: more optimizations and ScalaDocs --- common/src/main/scala/scalan/package.scala | 16 ++++++++- core/src/main/scala/scalan/Base.scala | 23 +++++++----- .../scala/scalan/primitives/Functions.scala | 4 +-- .../scala/scalan/primitives/LogicalOps.scala | 8 +++++ .../scala/scalan/primitives/NumericOps.scala | 22 ++++++++++++ .../scala/scalan/primitives/OrderingOps.scala | 9 +++++ .../main/scala/scalan/primitives/Thunks.scala | 1 + .../scala/scalan/primitives/UnBinOps.scala | 24 +++++++++++-- .../main/scala/scalan/staged/AstGraphs.scala | 8 ++--- .../main/scala/special/sigma/TestBigInt.scala | 1 + .../special/sigma/TestGroupElement.scala | 1 + .../src/main/scala/sigmastate/Values.scala | 14 ++++++-- .../scala/sigmastate/eval/Evaluation.scala | 6 ++-- .../sigmastate/eval/RuntimeCosting.scala | 36 +++++++++++-------- .../main/scala/sigmastate/lang/Terms.scala | 4 +++ .../serialization/ErgoTreeSerializer.scala | 29 +++++++-------- .../src/main/scala/sigmastate/trees.scala | 14 ++++++++ .../src/main/scala/sigmastate/types.scala | 8 +++++ .../sigmastate/utils/SigmaByteReader.scala | 7 +++- 
.../scala/sigmastate/utxo/transformers.scala | 2 +- .../utxo/examples/LetsSpecification.scala | 3 +- 21 files changed, 183 insertions(+), 57 deletions(-) diff --git a/common/src/main/scala/scalan/package.scala b/common/src/main/scala/scalan/package.scala index ab705c62e8..f89cc78d53 100644 --- a/common/src/main/scala/scalan/package.scala +++ b/common/src/main/scala/scalan/package.scala @@ -5,5 +5,19 @@ package object scalan { /** Allows implicit resolution to find appropriate instance of ClassTag in * the scope where RType is implicitly available. */ implicit def rtypeToClassTag[A](implicit t: RType[A]): ClassTag[A] = t.classTag - + + /** Immutable empty array of integers, should be used instead of allocating new empty arrays. */ + val EmptyArrayOfInt = Array.empty[Int] + + /** Immutable empty Seq[Int] backed by empty array. + * You should prefer using it instead of `Seq[Int]()` or `Seq.empty[Int]` + */ + val EmptySeqOfInt: Seq[Int] = EmptyArrayOfInt + + /** Create a new empty buffer around pre-allocated empty array. + * This method is preferred, rather that creating empty debox.Buffer directly + * because it allows to avoid allocation of the empty array. + */ + def emptyDBufferOfInt: debox.Buffer[Int] = debox.Buffer.unsafe(EmptyArrayOfInt) + } diff --git a/core/src/main/scala/scalan/Base.scala b/core/src/main/scala/scalan/Base.scala index 4ff59033de..dd1b4b59fd 100644 --- a/core/src/main/scala/scalan/Base.scala +++ b/core/src/main/scala/scalan/Base.scala @@ -816,17 +816,22 @@ abstract class Base { scalan: Scalan => res } + /** Immutable empty array of symbols, can be used to avoid unnecessary allocations. */ val EmptyArrayOfSym = Array.empty[Sym] + + /** Immutable empty Seq, can be used to avoid unnecessary allocations. 
*/ val EmptySeqOfSym: Seq[Sym] = EmptyArrayOfSym - def EmptyDBufferOfSym: DBuffer[Sym] = DBuffer.unsafe(EmptyArrayOfSym) -} -object Base { - val EmptyArrayOfInt = Array.empty[Int] - val EmptySeqOfInt: Seq[Int] = EmptyArrayOfInt - val EmptyDSetOfInt: debox.Set[Int] = debox.Set.empty + /** Create a new empty buffer around pre-allocated empty array. + * This method is preferred, rather that creating empty debox.Buffer directly + * because it allows to avoid allocation of the empty array. + */ + @inline final def emptyDBufferOfSym: DBuffer[Sym] = DBuffer.unsafe(EmptyArrayOfSym) - /** WARNING! Since it is mutable, special care should be taken to not change this buffer. - * @hotspot used heavily in scheduling and to avoid allocations*/ - def EmptyDBufferOfInt: debox.Buffer[Int] = debox.Buffer.unsafe(EmptyArrayOfInt) + /** Used internally in IR and should be used with care since it is mutable. + * At the same time, it is used in the hotspot and allows to avoid roughly tens of + * thousands of allocations per second. + * WARNING: Mutations of this instance can lead to undefined behavior. 
+ */ + protected val EmptyDSetOfInt: debox.Set[Int] = debox.Set.empty } diff --git a/core/src/main/scala/scalan/primitives/Functions.scala b/core/src/main/scala/scalan/primitives/Functions.scala index fc75631ffe..0acad6ca81 100644 --- a/core/src/main/scala/scalan/primitives/Functions.scala +++ b/core/src/main/scala/scalan/primitives/Functions.scala @@ -4,7 +4,7 @@ import java.util import scalan.staged.ProgramGraphs import scalan.util.GraphUtil -import scalan.{Lazy, Base, Nullable, Scalan} +import scalan.{Nullable, emptyDBufferOfInt, Base, Lazy, Scalan} import debox.{Buffer => DBuffer} import scala.language.implicitConversions @@ -120,7 +120,7 @@ trait Functions extends Base with ProgramGraphs { self: Scalan => override lazy val scheduleIds: DBuffer[Int] = { val sch = if (isIdentity) - Base.EmptyDBufferOfInt + emptyDBufferOfInt else { // graph g will contain all Defs reified as part of this Lambda, (due to `filterNode`) // BUT not all of them depend on boundVars, thus we need to filter them out diff --git a/core/src/main/scala/scalan/primitives/LogicalOps.scala b/core/src/main/scala/scalan/primitives/LogicalOps.scala index 881de678b6..e81b546139 100644 --- a/core/src/main/scala/scalan/primitives/LogicalOps.scala +++ b/core/src/main/scala/scalan/primitives/LogicalOps.scala @@ -2,27 +2,34 @@ package scalan.primitives import scalan.{Base, Scalan} +/** Slice in Scala cake with definitions of logical operations. */ trait LogicalOps extends Base { self: Scalan => + /** Logical AND binary operation. */ val And = new EndoBinOp[Boolean]("&&") { override def applySeq(x: Boolean, y: Boolean): Boolean = x && y } + /** Logical AND binary operation. */ val Or = new EndoBinOp[Boolean]("||") { override def applySeq(x: Boolean, y: Boolean): Boolean = x || y } + /** Logical NOT unary operation. */ val Not = new EndoUnOp[Boolean]("!") { override def applySeq(x: Boolean): Boolean = !x } + /** Logical XOR binary operation. 
*/ val BinaryXorOp = new EndoBinOp[Boolean]("^") { override def applySeq(x: Boolean, y: Boolean): Boolean = x ^ y } + /** Boolean to Int conversion unary operation. */ val BooleanToInt = new UnOp[Boolean, Int]("ToInt") { override def applySeq(x: Boolean): Int = if (x) 1 else 0 } + /** Extension methods over `Ref[Boolean]`. */ implicit class RepBooleanOps(value: Ref[Boolean]) { def &&(y: Ref[Boolean]): Ref[Boolean] = And(value, y) def ||(y: Ref[Boolean]): Ref[Boolean] = Or(value, y) @@ -36,6 +43,7 @@ trait LogicalOps extends Base { self: Scalan => } + /** Helper method which defines rewriting rules with boolean constants. */ @inline final def rewriteBoolConsts(lhs: Sym, rhs: Sym, ifTrue: Sym => Sym, ifFalse: Sym => Sym, ifEqual: Sym => Sym, ifNegated: Sym => Sym): Sym = lhs match { diff --git a/core/src/main/scala/scalan/primitives/NumericOps.scala b/core/src/main/scala/scalan/primitives/NumericOps.scala index 11a7c34c02..5999ee10f2 100644 --- a/core/src/main/scala/scalan/primitives/NumericOps.scala +++ b/core/src/main/scala/scalan/primitives/NumericOps.scala @@ -2,7 +2,10 @@ package scalan.primitives import scalan.{ExactNumeric, Base, Scalan, ExactIntegral} +/** Slice in Scala cake with definitions of numeric operations. */ trait NumericOps extends Base { self: Scalan => + + /** Extension methods over `Ref[T]` where T is instance of ExactNumeric type-class. */ implicit class NumericOpsCls[T](x: Ref[T])(implicit val n: ExactNumeric[T]) { def +(y: Ref[T]): Ref[T] = NumericPlus(n)(x.elem).apply(x, y) def -(y: Ref[T]): Ref[T] = NumericMinus(n)(x.elem).apply(x, y) @@ -15,6 +18,7 @@ trait NumericOps extends Base { self: Scalan => def toLong: Ref[Long] = NumericToLong(n).apply(x) } + /** Extension methods over `Ref[T]` where T is instance of ExactIntegral type-class. 
*/ implicit class IntegralOpsCls[T](x: Ref[T])(implicit i: ExactIntegral[T]) { def div(y: Ref[T]): Ref[T] = IntegralDivide(i)(x.elem).apply(x, y) def mod(y: Ref[T]): Ref[T] = IntegralMod(i)(x.elem).apply(x, y) @@ -23,57 +27,75 @@ trait NumericOps extends Base { self: Scalan => def %(y: Ref[T]): Ref[T] = mod(y) } + /** Return an ExactNumeric for a given type T. */ def numeric[T:ExactNumeric]: ExactNumeric[T] = implicitly[ExactNumeric[T]] + + /** Return an ExactIntegral for a given type T. */ def integral[T:ExactIntegral]: ExactIntegral[T] = implicitly[ExactIntegral[T]] + /** Descriptor of binary `+` operation. */ case class NumericPlus[T: Elem](n: ExactNumeric[T]) extends EndoBinOp[T]("+") { override def applySeq(x: T, y: T): T = n.plus(x, y) } + /** Descriptor of binary `-` operation. */ case class NumericMinus[T: Elem](n: ExactNumeric[T]) extends EndoBinOp[T]("-") { override def applySeq(x: T, y: T): T = n.minus(x, y) } + /** Descriptor of binary `*` operation. */ case class NumericTimes[T: Elem](n: ExactNumeric[T]) extends EndoBinOp[T]("*") { override def applySeq(x: T, y: T): T = n.times(x, y) } + /** Base class for descriptors of binary division operations. */ abstract class DivOp[T: Elem](opName: String, n: ExactIntegral[T]) extends EndoBinOp[T](opName) { override def shouldPropagate(lhs: T, rhs: T) = rhs != n.zero } + /** Descriptor of unary `-` operation. */ case class NumericNegate[T: Elem](n: ExactNumeric[T]) extends UnOp[T, T]("-") { override def applySeq(x: T): T = n.negate(x) } + /** Descriptor of unary `ToDouble` conversion operation. */ case class NumericToDouble[T](n: ExactNumeric[T]) extends UnOp[T,Double]("ToDouble") { override def applySeq(x: T): Double = n.toDouble(x) } + /** Descriptor of unary `ToFloat` conversion operation. */ case class NumericToFloat[T](n: ExactNumeric[T]) extends UnOp[T, Float]("ToFloat") { override def applySeq(x: T): Float = n.toFloat(x) } + /** Descriptor of unary `ToInt` conversion operation. 
*/ case class NumericToInt[T](n: ExactNumeric[T]) extends UnOp[T,Int]("ToInt") { override def applySeq(x: T): Int = n.toInt(x) } + /** Descriptor of unary `ToLong` conversion operation. */ case class NumericToLong[T](n: ExactNumeric[T]) extends UnOp[T,Long]("ToLong") { override def applySeq(x: T): Long = n.toLong(x) } + /** Descriptor of unary `abs` operation. */ case class Abs[T: Elem](n: ExactNumeric[T]) extends UnOp[T, T]("Abs") { override def applySeq(x: T): T = n.abs(x) } + /** Descriptor of binary `/` operation (integral division). */ case class IntegralDivide[T](i: ExactIntegral[T])(implicit elem: Elem[T]) extends DivOp[T]("/", i) { override def applySeq(x: T, y: T): T = i.quot(x, y) } + /** Descriptor of binary `%` operation (reminder of integral division). */ case class IntegralMod[T](i: ExactIntegral[T])(implicit elem: Elem[T]) extends DivOp[T]("%", i) { override def applySeq(x: T, y: T): T = i.rem(x, y) } + /** Compares the given value with zero of the given ExactNumeric instance. */ @inline final def isZero[T](x: T, n: ExactNumeric[T]) = x == n.zero + + /** Compares the given value with 1 of the given ExactNumeric instance. */ @inline final def isOne[T](x: T, n: ExactNumeric[T]) = x == n.fromInt(1) } diff --git a/core/src/main/scala/scalan/primitives/OrderingOps.scala b/core/src/main/scala/scalan/primitives/OrderingOps.scala index cea48ea52e..7d997e1487 100644 --- a/core/src/main/scala/scalan/primitives/OrderingOps.scala +++ b/core/src/main/scala/scalan/primitives/OrderingOps.scala @@ -2,10 +2,12 @@ package scalan.primitives import scalan.{Base, Scalan, ExactOrdering} +/** Slice in Scala cake with definitions of comparison operations. 
*/ trait OrderingOps extends Base { self: Scalan => implicit def repOrderingToOrderingOps[T](x: Ref[T])(implicit n: ExactOrdering[T]) = new OrderingOpsCls(x) implicit def OrderingToOrderingOps[T](x: T)(implicit n: ExactOrdering[T], et: Elem[T]) = new OrderingOpsCls(toRep(x)) + /** Extension method over `Ref[T]` given an instance of ExactOrdering for T. */ class OrderingOpsCls[T](lhs: Ref[T])(implicit val n: ExactOrdering[T]) { def <(rhs: Ref[T]) = OrderingLT(n).apply(lhs,rhs) def <=(rhs: Ref[T]) = OrderingLTEQ(n).apply(lhs,rhs) @@ -16,30 +18,37 @@ trait OrderingOps extends Base { self: Scalan => def compare(rhs: Ref[T]): Ref[Int] = OrderingCompare(n).apply(lhs,rhs) } + /** Descriptor of binary `<` operation. */ case class OrderingLT[T](ord: ExactOrdering[T]) extends BinOp[T, Boolean]("<") { override def applySeq(x: T, y: T): Boolean = ord.lt(x, y) } + /** Descriptor of binary `<=` operation. */ case class OrderingLTEQ[T](ord: ExactOrdering[T]) extends BinOp[T, Boolean]("<=") { override def applySeq(x: T, y: T): Boolean = ord.lteq(x, y) } + /** Descriptor of binary `>` operation. */ case class OrderingGT[T](ord: ExactOrdering[T]) extends BinOp[T, Boolean](">") { override def applySeq(x: T, y: T): Boolean = ord.gt(x, y) } + /** Descriptor of binary `>=` operation. */ case class OrderingGTEQ[T](ord: ExactOrdering[T]) extends BinOp[T, Boolean](">=") { override def applySeq(x: T, y: T): Boolean = ord.gteq(x, y) } + /** Descriptor of binary `max` operation. */ case class OrderingMax[T: Elem](ord: ExactOrdering[T]) extends BinOp[T, T]("max") { override def applySeq(x: T, y: T): T = ord.max(x, y) } + /** Descriptor of binary `min` operation. */ case class OrderingMin[T: Elem](ord: ExactOrdering[T]) extends BinOp[T, T]("min") { override def applySeq(x: T, y: T): T = ord.min(x, y) } + /** Descriptor of binary `compare` operation. 
*/ case class OrderingCompare[T](ord: ExactOrdering[T]) extends BinOp[T, Int]("compare") { override def applySeq(x: T, y: T): Int = ord.compare(x, y) } diff --git a/core/src/main/scala/scalan/primitives/Thunks.scala b/core/src/main/scala/scalan/primitives/Thunks.scala index ff5af4e504..6907b4a20d 100644 --- a/core/src/main/scala/scalan/primitives/Thunks.scala +++ b/core/src/main/scala/scalan/primitives/Thunks.scala @@ -10,6 +10,7 @@ import scalan.util.{Covariant, GraphUtil} import scala.collection.Seq +/** Slice in Scala cake with definitions of Thunk operations. */ trait Thunks extends Functions with GraphVizExport { self: Scalan => type Th[+T] = Ref[Thunk[T]] diff --git a/core/src/main/scala/scalan/primitives/UnBinOps.scala b/core/src/main/scala/scalan/primitives/UnBinOps.scala index e7fa4249c9..749556bf0e 100644 --- a/core/src/main/scala/scalan/primitives/UnBinOps.scala +++ b/core/src/main/scala/scalan/primitives/UnBinOps.scala @@ -4,6 +4,7 @@ import scalan.{Scalan, Base} trait UnBinOps extends Base { self: Scalan => + /** Base class for descriptors of unary operations. */ abstract class UnOp[A, R](val opName: String)(implicit val eResult: Elem[R]) { override def toString = opName /** Called as part of graph interpretation to execute the given unary operation. @@ -12,11 +13,14 @@ trait UnBinOps extends Base { self: Scalan => */ def applySeq(x: A): R + /** Builds a new graph node by applying this operation to the given argument. */ def apply(arg: Ref[A]) = applyUnOp(this, arg) + /** Whether the constants should be propagated through this operations by rewriting. */ def shouldPropagate(arg: A) = true } + /** Base class for descriptors of binary operations. 
*/ abstract class BinOp[A, R](val opName: String)(implicit val eResult: Elem[R]) { override def toString = opName @@ -26,33 +30,49 @@ trait UnBinOps extends Base { self: Scalan => * @return result of applying this operation to (x, y) */ def applySeq(x: A, y: A): R + + /** Builds a new graph node by applying this operation to the given arguments. */ def apply(lhs: Ref[A], rhs: Ref[A]) = applyBinOp(this, lhs, rhs) + + /** Builds a new graph node by applying this operation to the given arguments. + * This is a short-cuting (aka lazy) version of the operation, where the lazyness is + * represented by Thunk. + */ def applyLazy(lhs: Ref[A], rhs: Ref[Thunk[A]]) = applyBinOpLazy(this, lhs, rhs) - // ideally shouldn't be necessary, but - // we curently can't handle division by zero properly + /** Whether the constants should be propagated through this operations by rewriting. */ def shouldPropagate(lhs: A, rhs: A) = true } type EndoUnOp[A] = UnOp[A, A] type EndoBinOp[A] = BinOp[A, A] + /** Graph node which represents application of the given unary operation to the given argument. */ case class ApplyUnOp[A, R](op: UnOp[A, R], arg: Ref[A]) extends BaseDef[R]()(op.eResult) { override def toString = s"$op($arg)" override def transform(t: Transformer): Def[R] = ApplyUnOp[A,R](op, t(arg)) } + /** Graph node which represents application of the given binary operation to the given arguments. */ case class ApplyBinOp[A, R](op: BinOp[A, R], lhs: Ref[A], rhs: Ref[A]) extends BaseDef[R]()(op.eResult) { override def toString = s"$op($lhs, $rhs)" override def transform(t: Transformer): Def[R] = ApplyBinOp[A,R](op, t(lhs), t(rhs)) } + + /** Graph node which represents application of the given binary operation to the given arguments + * where the second argument is lazy. 
+ */ case class ApplyBinOpLazy[A, R](op: BinOp[A, R], lhs: Ref[A], rhs: Ref[Thunk[A]]) extends BaseDef[R]()(op.eResult) { override def toString = s"$lhs $op { $rhs }" override def transform(t: Transformer): Def[R] = ApplyBinOpLazy[A,R](op, t(lhs), t(rhs)) } + /** Overridable constructor of an unary operation node. */ def applyUnOp[A, R](op: UnOp[A, R], arg: Ref[A]): Ref[R] = ApplyUnOp(op, arg) + /** Overridable constructor of a binary operation node. */ def applyBinOp[A, R](op: BinOp[A, R], lhs: Ref[A], rhs: Ref[A]): Ref[R] = ApplyBinOp(op, lhs, rhs) + + /** Overridable constructor of a binary operation node with lazy argument. */ def applyBinOpLazy[A, R](op: BinOp[A, R], lhs: Ref[A], rhs: Ref[Thunk[A]]): Ref[R] = ApplyBinOpLazy(op, lhs, rhs) } \ No newline at end of file diff --git a/core/src/main/scala/scalan/staged/AstGraphs.scala b/core/src/main/scala/scalan/staged/AstGraphs.scala index e140b3d653..dd801e25e3 100644 --- a/core/src/main/scala/scalan/staged/AstGraphs.scala +++ b/core/src/main/scala/scalan/staged/AstGraphs.scala @@ -1,7 +1,7 @@ package scalan.staged import scala.collection._ -import scalan.{Scalan, Base} +import scalan.{Base, Scalan, emptyDBufferOfInt} import scalan.compilation.GraphVizConfig import spire.syntax.all.cfor import debox.{Set => DSet, Buffer => DBuffer, Map => DMap} @@ -110,7 +110,7 @@ trait AstGraphs extends Transforming { self: Scalan => val ids = scheduleIds val len = ids.length if (len == 0) { - Base.EmptyDSetOfInt + EmptyDSetOfInt } else { val res = DSet.ofSize[Int](len) res ++= ids.toArray @@ -198,7 +198,7 @@ trait AstGraphs extends Transforming { self: Scalan => def globalUsagesOf(s: Sym): DBuffer[Sym] = allNodes.get(s.node.nodeId) match { case Some(node) => node.outSyms - case None => EmptyDBufferOfSym + case None => emptyDBufferOfSym } def hasManyUsagesGlobal(s: Sym): Boolean = globalUsagesOf(s).length > 1 @@ -206,7 +206,7 @@ trait AstGraphs extends Transforming { self: Scalan => /** @hotspot for performance we return mutable 
structure, but it should never be changed. */ def usagesOf(id: Int): DBuffer[Int] = { val node = usageMap.getOrElse(id, null) - if (node == null) return Base.EmptyDBufferOfInt + if (node == null) return emptyDBufferOfInt node.usages } diff --git a/sigma-impl/src/main/scala/special/sigma/TestBigInt.scala b/sigma-impl/src/main/scala/special/sigma/TestBigInt.scala index 8b5f44170f..423386051b 100644 --- a/sigma-impl/src/main/scala/special/sigma/TestBigInt.scala +++ b/sigma-impl/src/main/scala/special/sigma/TestBigInt.scala @@ -4,6 +4,7 @@ import special.collection.Coll import java.math.BigInteger import scalan.util.Extensions.BigIntegerOps +// TODO refactor: this class should be removed before v5.0 abstract class TestBigInt(private[sigma] val value: BigInteger) extends BigInt { def dsl: TestSigmaDslBuilder diff --git a/sigma-impl/src/main/scala/special/sigma/TestGroupElement.scala b/sigma-impl/src/main/scala/special/sigma/TestGroupElement.scala index 9c21791fab..d8b84fd2f2 100644 --- a/sigma-impl/src/main/scala/special/sigma/TestGroupElement.scala +++ b/sigma-impl/src/main/scala/special/sigma/TestGroupElement.scala @@ -3,6 +3,7 @@ package special.sigma import org.bouncycastle.math.ec.ECPoint import special.collection.Coll +// TODO refactor: this class should be removed before v5.0 abstract class TestGroupElement(private[sigma] val value: ECPoint) extends GroupElement { def dsl: TestSigmaDslBuilder diff --git a/sigmastate/src/main/scala/sigmastate/Values.scala b/sigmastate/src/main/scala/sigmastate/Values.scala index 3c517c089e..b2a0409761 100644 --- a/sigmastate/src/main/scala/sigmastate/Values.scala +++ b/sigmastate/src/main/scala/sigmastate/Values.scala @@ -116,8 +116,10 @@ object Values { def notSupportedError(v: SValue, opName: String) = throw new IllegalArgumentException(s"Method $opName is not supported for node $v") - /** Immutable values used in many places which allows to avoid allocations. */ + /** Immutable empty array of values. 
Can be used to avoid allocation. */ val EmptyArray = Array.empty[SValue] + + /** Immutable empty Seq of values. Can be used to avoid allocation. */ val EmptySeq: IndexedSeq[SValue] = EmptyArray } @@ -189,10 +191,16 @@ object Values { object Constant extends ValueCompanion { override def opCode: OpCode = ConstantCode - /** Immutable empty array to save allocations in many places. */ + /** Immutable empty array, can be used to save allocations in many places. */ val EmptyArray = Array.empty[Constant[SType]] + /** Immutable empty IndexedSeq, can be used to save allocations in many places. */ + val EmptySeq: IndexedSeq[Constant[SType]] = Array.empty[Constant[SType]] + + /** Helper factory method. */ def apply[S <: SType](value: S#WrappedType, tpe: S): Constant[S] = ConstantNode(value, tpe) + + /** Recognizer of Constant tree nodes used in patterns. */ def unapply[S <: SType](v: EvaluatedValue[S]): Option[(S#WrappedType, S)] = v match { case ConstantNode(value, tpe) => Some((value, tpe)) case _ => None @@ -765,7 +773,7 @@ object Values { def isValDef: Boolean } object BlockItem { - /** Immutable empty array to save allocations in many places. */ + /** Immutable empty array, can be used to save allocations in many places. */ val EmptyArray = Array.empty[BlockItem] /** Immutable empty IndexedSeq to save allocations in many places. */ diff --git a/sigmastate/src/main/scala/sigmastate/eval/Evaluation.scala b/sigmastate/src/main/scala/sigmastate/eval/Evaluation.scala index 87eff10139..6268ccde7a 100644 --- a/sigmastate/src/main/scala/sigmastate/eval/Evaluation.scala +++ b/sigmastate/src/main/scala/sigmastate/eval/Evaluation.scala @@ -88,10 +88,10 @@ trait Evaluation extends RuntimeCosting { IR: IRContext => * from OpCodes + OpCodesExtra combined range. 
(See getOpCodeEx) */ protected def allowedOpCodesInCosting: HashSet[OpCodeExtra] = Evaluation.AllowedOpCodesInCosting - def isAllowedOpCodeInCosting(opCode: OpCodeExtra): Boolean = allowedOpCodesInCosting.contains(opCode) + /** Checks the given opCode belong to an operation allowed in cost function. */ + protected def isAllowedOpCodeInCosting(opCode: OpCodeExtra): Boolean = allowedOpCodesInCosting.contains(opCode) - /** Returns extended op code assigned to the given IR graph node. - */ + /** Returns extended op code assigned to the given IR graph node. */ def getOpCodeEx(d: Def[_]): OpCodeExtra = d match { case _: OpCost => OpCostCode case _: PerKbCostOf => PerKbCostOfCode diff --git a/sigmastate/src/main/scala/sigmastate/eval/RuntimeCosting.scala b/sigmastate/src/main/scala/sigmastate/eval/RuntimeCosting.scala index feebb66c6d..4d6369ed49 100644 --- a/sigmastate/src/main/scala/sigmastate/eval/RuntimeCosting.scala +++ b/sigmastate/src/main/scala/sigmastate/eval/RuntimeCosting.scala @@ -1837,23 +1837,29 @@ trait RuntimeCosting extends CostingRules { IR: IRContext => // fallback rule for MethodCall, should be the last case in the list case Terms.MethodCall(obj, method, args, typeSubst) if method.objType.coster.isDefined => val objC = eval(obj) - val argsC = { - val len = args.length - val res = new Array[RCosted[SType#WrappedType]](len) - cfor(0)(_ < len, _ + 1) { i => - res(i) = eval(args(i)) + val argsC: Seq[RCosted[SType#WrappedType]] = + if (args.isEmpty) + EmptySeqOfSym.asInstanceOf[Seq[RCosted[SType#WrappedType]]] + else { + val len = args.length + val res = new Array[RCosted[SType#WrappedType]](len) + cfor(0)(_ < len, _ + 1) { i => + res(i) = eval(args(i)) + } + res } - res - } - val elems = { - val ts = typeSubst.values.toArray - val len = ts.length - val res = new Array[Sym](len) - cfor(0)(_ < len, _ + 1) { i => - res(i) = liftElem(stypeToElem(ts(i)).asInstanceOf[Elem[Any]]) + val elems: Seq[Sym] = + if (typeSubst.isEmpty) + EmptySeqOfSym + else { + val ts = 
typeSubst.values.toArray + val len = ts.length + val res = new Array[Sym](len) + cfor(0)(_ < len, _ + 1) { i => + res(i) = liftElem(stypeToElem(ts(i)).asInstanceOf[Elem[Any]]) + } + res } - res - } method.objType.coster.get(IR)(objC, method, argsC, elems) case _ => diff --git a/sigmastate/src/main/scala/sigmastate/lang/Terms.scala b/sigmastate/src/main/scala/sigmastate/lang/Terms.scala index 962fdde3a7..2b4bc61f11 100644 --- a/sigmastate/src/main/scala/sigmastate/lang/Terms.scala +++ b/sigmastate/src/main/scala/sigmastate/lang/Terms.scala @@ -109,6 +109,10 @@ object Terms { } // TODO HF: move to sigmastate.Values + /** ErgoTree node which represents application of function `func` to the given arguments. + * @param func expression which evaluates to a function + * @param args arguments of the function application + */ case class Apply(func: Value[SType], args: IndexedSeq[Value[SType]]) extends Value[SType] { override def companion = Apply override lazy val tpe: SType = func.tpe match { diff --git a/sigmastate/src/main/scala/sigmastate/serialization/ErgoTreeSerializer.scala b/sigmastate/src/main/scala/sigmastate/serialization/ErgoTreeSerializer.scala index 88959ccc2d..279f0866b0 100644 --- a/sigmastate/src/main/scala/sigmastate/serialization/ErgoTreeSerializer.scala +++ b/sigmastate/src/main/scala/sigmastate/serialization/ErgoTreeSerializer.scala @@ -210,27 +210,28 @@ class ErgoTreeSerializer { /** Deserialize constants section only. 
* @hotspot don't beautify this code */ - private def deserializeConstants(header: Byte, r: SigmaByteReader): Array[Constant[SType]] = { - val constants = if (ErgoTree.isConstantSegregation(header)) { - val nConsts = r.getUInt().toInt - if (nConsts > 0) { - // @hotspot: allocate new array only if it is not empty - val res = new Array[Constant[SType]](nConsts) - cfor(0)(_ < nConsts, _ + 1) { i => - res(i) = constantSerializer.deserialize(r) + private def deserializeConstants(header: Byte, r: SigmaByteReader): IndexedSeq[Constant[SType]] = { + val constants: IndexedSeq[Constant[SType]] = + if (ErgoTree.isConstantSegregation(header)) { + val nConsts = r.getUInt().toInt + if (nConsts > 0) { + // @hotspot: allocate new array only if it is not empty + val res = new Array[Constant[SType]](nConsts) + cfor(0)(_ < nConsts, _ + 1) { i => + res(i) = constantSerializer.deserialize(r) + } + res } - res + else + Constant.EmptySeq } else - Constant.EmptyArray - } - else - Constant.EmptyArray + Constant.EmptySeq constants } /** Deserialize header and constant sections, but output the rest of the bytes as separate array. 
*/ - def deserializeHeaderWithTreeBytes(r: SigmaByteReader): (Byte, Option[Int], Array[Constant[SType]], Array[Byte]) = { + def deserializeHeaderWithTreeBytes(r: SigmaByteReader): (Byte, Option[Int], IndexedSeq[Constant[SType]], Array[Byte]) = { val (header, sizeOpt) = deserializeHeaderAndSize(r) val constants = deserializeConstants(header, r) val treeBytes = r.getBytes(r.remaining) diff --git a/sigmastate/src/main/scala/sigmastate/trees.scala b/sigmastate/src/main/scala/sigmastate/trees.scala index f78f6cf44b..117d0308bd 100644 --- a/sigmastate/src/main/scala/sigmastate/trees.scala +++ b/sigmastate/src/main/scala/sigmastate/trees.scala @@ -41,6 +41,13 @@ case class CAND(override val children: Seq[SigmaBoolean]) extends SigmaConjectur object CAND { import TrivialProp._ + + /** Connects the given sigma propositions into CAND proposition performing + * partial evaluation when some of them are trivial propositioins. + * + * @param items propositions to combine into CAND + * @return CAND, TrueProp, FalseProp or even one of the items depending on partial evaluation + */ def normalized(items: Seq[SigmaBoolean]): SigmaBoolean = { require(items.nonEmpty) val res = new ArrayBuffer[SigmaBoolean]() @@ -69,6 +76,13 @@ case class COR(children: Seq[SigmaBoolean]) extends SigmaConjecture { object COR { import TrivialProp._ + + /** Connects the given sigma propositions into COR proposition performing + * partial evaluation when some of them are trivial propositioins. 
+ * + * @param items propositions to combine into COR + * @return COR, TrueProp, FalseProp or even one of the items depending on partial evaluation + */ def normalized(items: Seq[SigmaBoolean]): SigmaBoolean = { require(items.nonEmpty) val res = new ArrayBuffer[SigmaBoolean]() diff --git a/sigmastate/src/main/scala/sigmastate/types.scala b/sigmastate/src/main/scala/sigmastate/types.scala index fb020c841d..1bc2017caf 100644 --- a/sigmastate/src/main/scala/sigmastate/types.scala +++ b/sigmastate/src/main/scala/sigmastate/types.scala @@ -151,7 +151,11 @@ object SType { val IndexedSeqOfT1: IndexedSeq[SType] = Array(SType.tT) val IndexedSeqOfT2: IndexedSeq[SType] = Array(SType.tT, SType.tT) + + /** Immutable empty array, can be used to avoid repeated allocations. */ val EmptyArray = Array.empty[SType] + + /** Immutable empty IndexedSeq, can be used to avoid repeated allocations. */ val EmptySeq: IndexedSeq[SType] = EmptyArray /** All pre-defined types should be listed here. Note, NoType is not listed. @@ -1338,7 +1342,11 @@ case class STypeVar(name: String) extends SType { object STypeVar { val TypeCode: TypeCode = 103: Byte implicit def liftString(n: String): STypeVar = STypeVar(n) + + /** Immutable empty array, can be used to avoid repeated allocations. */ val EmptyArray = Array.empty[STypeVar] + + /** Immutable empty IndexedSeq, can be used to avoid repeated allocations. */ val EmptySeq: IndexedSeq[STypeVar] = EmptyArray } diff --git a/sigmastate/src/main/scala/sigmastate/utils/SigmaByteReader.scala b/sigmastate/src/main/scala/sigmastate/utils/SigmaByteReader.scala index 47ee83c405..cf51dd8b5d 100644 --- a/sigmastate/src/main/scala/sigmastate/utils/SigmaByteReader.scala +++ b/sigmastate/src/main/scala/sigmastate/utils/SigmaByteReader.scala @@ -118,9 +118,14 @@ class SigmaByteReader(val r: Reader, lvl = v } + /** Read sequence of values from this reader. + * It first reads the number of values and then reads each value using `getValue` method. 
+ * + * @return a sequence of zero of more values read + */ @inline def getValues(): IndexedSeq[SValue] = { val size = getUInt().toIntExact - if (size == 0) Value.EmptySeq + if (size == 0) Value.EmptySeq // quick short-cut when there is nothing to read else { val xs = new Array[SValue](size) cfor(0)(_ < size, _ + 1) { i => diff --git a/sigmastate/src/main/scala/sigmastate/utxo/transformers.scala b/sigmastate/src/main/scala/sigmastate/utxo/transformers.scala index 1384b4b89f..2c79e001e6 100644 --- a/sigmastate/src/main/scala/sigmastate/utxo/transformers.scala +++ b/sigmastate/src/main/scala/sigmastate/utxo/transformers.scala @@ -236,7 +236,7 @@ case class ExtractRegisterAs[V <: SType]( input: Value[SBox.type], object ExtractRegisterAs extends ValueCompanion { override def opCode: OpCode = OpCodes.ExtractRegisterAs - //@hotspot: avoids 10^6 allocations + //@hotspot: avoids thousands of allocations per second private val BoxAndByte: IndexedSeq[SType] = Array(SBox, SByte) def apply[V <: SType](input: Value[SBox.type], diff --git a/sigmastate/src/test/scala/sigmastate/utxo/examples/LetsSpecification.scala b/sigmastate/src/test/scala/sigmastate/utxo/examples/LetsSpecification.scala index c3af822427..97746d4381 100644 --- a/sigmastate/src/test/scala/sigmastate/utxo/examples/LetsSpecification.scala +++ b/sigmastate/src/test/scala/sigmastate/utxo/examples/LetsSpecification.scala @@ -166,8 +166,7 @@ import scala.util.Random some day this article will be continued! 
*/ -class LetsSpecification extends SigmaTestingCommons { - suite => +class LetsSpecification extends SigmaTestingCommons { suite => // Not mixed with TestContext since it is not possible to call compiler.compile outside tests if mixed implicit lazy val IR: IRContext = new TestingIRContext From 6918a011487b84879edeb4eb3d3d7d6e77e07c61 Mon Sep 17 00:00:00 2001 From: Alexander Slesarenko Date: Thu, 8 Oct 2020 00:32:50 +0300 Subject: [PATCH 11/19] memory bound IRContext: compiler fix --- sigmastate/src/main/scala/sigmastate/eval/Evaluation.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sigmastate/src/main/scala/sigmastate/eval/Evaluation.scala b/sigmastate/src/main/scala/sigmastate/eval/Evaluation.scala index 6268ccde7a..25701bda36 100644 --- a/sigmastate/src/main/scala/sigmastate/eval/Evaluation.scala +++ b/sigmastate/src/main/scala/sigmastate/eval/Evaluation.scala @@ -89,7 +89,7 @@ trait Evaluation extends RuntimeCosting { IR: IRContext => protected def allowedOpCodesInCosting: HashSet[OpCodeExtra] = Evaluation.AllowedOpCodesInCosting /** Checks the given opCode belong to an operation allowed in cost function. */ - protected def isAllowedOpCodeInCosting(opCode: OpCodeExtra): Boolean = allowedOpCodesInCosting.contains(opCode) + def isAllowedOpCodeInCosting(opCode: OpCodeExtra): Boolean = allowedOpCodesInCosting.contains(opCode) /** Returns extended op code assigned to the given IR graph node. 
*/ def getOpCodeEx(d: Def[_]): OpCodeExtra = d match { From 402b15ade0e5e2336d1d9badcad8d536291b4e9f Mon Sep 17 00:00:00 2001 From: Alexander Slesarenko Date: Thu, 8 Oct 2020 22:27:22 +0300 Subject: [PATCH 12/19] memory bound IRContext: increase IR capacity + more optimizations --- .../scala/scalan/util/CollectionUtil.scala | 1 + core/src/main/scala/scalan/TypeDescs.scala | 5 +++-- .../scala/sigmastate/eval/Evaluation.scala | 19 +++++++++++++++---- .../serialization/ConstantStore.scala | 2 +- 4 files changed, 20 insertions(+), 7 deletions(-) diff --git a/common/src/main/scala/scalan/util/CollectionUtil.scala b/common/src/main/scala/scalan/util/CollectionUtil.scala index bba14c516f..db16c17088 100644 --- a/common/src/main/scala/scalan/util/CollectionUtil.scala +++ b/common/src/main/scala/scalan/util/CollectionUtil.scala @@ -61,6 +61,7 @@ object CollectionUtil { res.toMap } + // TODO optimize: using cfor and avoiding allocations def joinSeqs[O, I, K](outer: GenIterable[O], inner: GenIterable[I])(outKey: O=>K, inKey: I=>K): GenIterable[(O,I)] = { val kvs = createMultiMap(inner.map(i => (inKey(i), i))) val res = outer.flatMap(o => { diff --git a/core/src/main/scala/scalan/TypeDescs.scala b/core/src/main/scala/scalan/TypeDescs.scala index 9b669e3311..b63d49c2bb 100644 --- a/core/src/main/scala/scalan/TypeDescs.scala +++ b/core/src/main/scala/scalan/TypeDescs.scala @@ -81,7 +81,7 @@ abstract class TypeDescs extends Base { self: Scalan => // } // } - // TODO optimize performance hot spot (45% of invokeUnlifted time) + // TODO optimize performance hot spot (45% of invokeUnlifted time), reduce allocation of Some final def getSourceValues(dataEnv: DataEnv, forWrapper: Boolean, stagedValues: AnyRef*): Seq[AnyRef] = { import OverloadHack._ val limit = stagedValues.length @@ -151,7 +151,7 @@ abstract class TypeDescs extends Base { self: Scalan => !!!(s"Cannot get Liftable instance for $this") final lazy val sourceType: RType[_] = liftable.sourceType - protected def 
collectMethods: Map[Method, MethodDesc] = Map() + protected def collectMethods: Map[Method, MethodDesc] = Map() // TODO optimize: all implementations protected lazy val methods: Map[Method, MethodDesc] = collectMethods // TODO benchamrk against the version below it @@ -244,6 +244,7 @@ abstract class TypeDescs extends Base { self: Scalan => m.getName } + // TODO optimize /** Build a mapping between methods of staged class and the corresponding methods of source class. * The methods are related using names. * The computed mapping can be used to project MethodCalls IR nodes back to the corresponding diff --git a/sigmastate/src/main/scala/sigmastate/eval/Evaluation.scala b/sigmastate/src/main/scala/sigmastate/eval/Evaluation.scala index 25701bda36..0ce67b66ff 100644 --- a/sigmastate/src/main/scala/sigmastate/eval/Evaluation.scala +++ b/sigmastate/src/main/scala/sigmastate/eval/Evaluation.scala @@ -25,6 +25,7 @@ import special.Types._ import scala.collection.immutable.HashSet import scala.collection.mutable import scala.collection.mutable.ArrayBuffer +import spire.syntax.all.cfor /** This is a slice in IRContext cake which implements evaluation of graphs. 
*/ @@ -886,15 +887,25 @@ object Evaluation { case SGroupElement => GroupElementRType case SAvlTree => AvlTreeRType case SSigmaProp => SigmaPropRType - case STuple(Seq(tpeA, tpeB)) => + case tup: STuple if tup.items.length == 2 => + val tpeA = tup.items(0) + val tpeB = tup.items(1) pairRType(stypeToRType(tpeA), stypeToRType(tpeB)) case STuple(items) => val types = items.toArray - tupleRType(types.map(t => stypeToRType(t).asInstanceOf[SomeType])) + val len = types.length + val rtypes = new Array[SomeType](len) + cfor(0)(_ < len, _ + 1) { i => + rtypes(i) = stypeToRType(types(i)).asInstanceOf[SomeType] + } + tupleRType(rtypes) case c: SCollectionType[a] => collRType(stypeToRType(c.elemType)) case o: SOption[a] => optionRType(stypeToRType(o.elemType)) - case SFunc(Seq(tpeArg), tpeRange, Nil) => funcRType(stypeToRType(tpeArg), stypeToRType(tpeRange)) - case _ => sys.error(s"Don't know how to convert SType $t to RType") + case SFunc(args, tpeRange, Nil) if args.length == 1 => + val tpeArg = args(0) + funcRType(stypeToRType(tpeArg), stypeToRType(tpeRange)) + case _ => + sys.error(s"Don't know how to convert SType $t to RType") }).asInstanceOf[RType[T#WrappedType]] /** Transforms RType descriptor of SigmaDsl, which is used during evaluation, diff --git a/sigmastate/src/main/scala/sigmastate/serialization/ConstantStore.scala b/sigmastate/src/main/scala/sigmastate/serialization/ConstantStore.scala index 70e9bbf8a6..0bd4588828 100644 --- a/sigmastate/src/main/scala/sigmastate/serialization/ConstantStore.scala +++ b/sigmastate/src/main/scala/sigmastate/serialization/ConstantStore.scala @@ -6,7 +6,7 @@ import sigmastate.lang.SigmaBuilder import debox.Buffer /** @hotspot used in deserialization (don't beautify this code) */ -class ConstantStore(private val constants: IndexedSeq[Constant[SType]] = Array[Constant[SType]]()) { +class ConstantStore(private val constants: IndexedSeq[Constant[SType]] = Constant.EmptySeq) { private val store: Buffer[Constant[SType]] = 
Buffer.fromIterable(constants) From 75cff7187d6d51b1e7d5a554e67dbb214c23de39 Mon Sep 17 00:00:00 2001 From: Alexander Slesarenko Date: Fri, 9 Oct 2020 11:00:01 +0300 Subject: [PATCH 13/19] memory bound IRContext: allocation optimization in Transformer --- core/src/main/scala/scalan/Base.scala | 25 ++++++++++++++++--------- 1 file changed, 16 insertions(+), 9 deletions(-) diff --git a/core/src/main/scala/scalan/Base.scala b/core/src/main/scala/scalan/Base.scala index dd1b4b59fd..6f53457910 100644 --- a/core/src/main/scala/scalan/Base.scala +++ b/core/src/main/scala/scalan/Base.scala @@ -9,6 +9,7 @@ import scalan.compilation.GraphVizConfig import scalan.util.StringUtil import debox.{Buffer => DBuffer} import spire.syntax.all.cfor +import scala.collection.mutable /** * The Base trait houses common AST nodes. It also manages a list of encountered definitions which @@ -452,22 +453,28 @@ abstract class Base { scalan: Scalan => /** Transform a sequence of nodes into new sequence of nodes. */ final def apply[A](xs: Seq[Ref[A]]): Seq[Ref[A]] = { val len = xs.length - val res = new Array[Ref[A]](len) - cfor(0)(_ < len, _ + 1) { i => - res(i) = apply(xs(i)) + if (len == 0) EmptySeqOfSym.asInstanceOf[Seq[Ref[A]]] + else { + val res = new Array[Ref[A]](len) + cfor(0)(_ < len, _ + 1) { i => + res(i) = apply(xs(i)) + } + res } - res } /** Apply this transformer to the nodes present in the sequence, * and leave non-Ref items unchanged. 
*/ final def apply(xs: Seq[Any])(implicit o: Overloaded1): Seq[Any] = { val len = xs.length - val res = new Array[Any](len) - cfor(0)(_ < len, _ + 1) { i => - val x = xs(i) match { case s: Ref[_] => apply(s); case s => s } - res(i) = x + if (len == 0) mutable.WrappedArray.empty + else { + val res = new Array[Any](len) + cfor(0)(_ < len, _ + 1) { i => + val x = xs(i) match { case s: Ref[_] => apply(s); case s => s } + res(i) = x + } + res } - res } def +[A](key: Sym, value: Sym): Transformer From f6faae0e9478f5d2b1ace50cc8b1ae7d200adabb Mon Sep 17 00:00:00 2001 From: Alexander Slesarenko Date: Sun, 11 Oct 2020 19:50:20 +0300 Subject: [PATCH 14/19] memory bound IRContext: force GC in SigmaDslTesting --- sigmastate/src/test/scala/special/sigma/SigmaDslTesting.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sigmastate/src/test/scala/special/sigma/SigmaDslTesting.scala b/sigmastate/src/test/scala/special/sigma/SigmaDslTesting.scala index a70d462a0e..ce066c9f26 100644 --- a/sigmastate/src/test/scala/special/sigma/SigmaDslTesting.scala +++ b/sigmastate/src/test/scala/special/sigma/SigmaDslTesting.scala @@ -467,7 +467,7 @@ class SigmaDslTesting extends PropSpec printTestCases: Boolean = PrintTestCasesDefault, failOnTestVectors: Boolean = FailOnTestVectorsDefault, preGeneratedSamples: Option[Seq[A]] = None): Unit = { - + System.gc() // force GC to avoid occasional OOM exception val table = Table(("x", "y"), cases:_*) forAll(table) { (x: A, expectedRes: Try[B]) => val res = f.checkEquality(x, printTestCases).map(_._1) From 288ec24b22b2873f46a2ba53e81d07cec6a209de Mon Sep 17 00:00:00 2001 From: Alexander Slesarenko Date: Wed, 14 Oct 2020 00:04:16 +0300 Subject: [PATCH 15/19] more cfor --- .../src/main/scala/sigmastate/eval/RuntimeCosting.scala | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/sigmastate/src/main/scala/sigmastate/eval/RuntimeCosting.scala b/sigmastate/src/main/scala/sigmastate/eval/RuntimeCosting.scala index 
4d6369ed49..a568c25f82 100644 --- a/sigmastate/src/main/scala/sigmastate/eval/RuntimeCosting.scala +++ b/sigmastate/src/main/scala/sigmastate/eval/RuntimeCosting.scala @@ -1232,7 +1232,11 @@ trait RuntimeCosting extends CostingRules { IR: IRContext => case BlockValue(binds, res) => var curEnv = env - for (vd @ ValDef(n, _, b) <- binds) { + val len = binds.length + cfor(0)(_ < len, _ + 1) { i => + val vd = binds(i).asInstanceOf[ValDef] + val n = vd.id + val b = vd.rhs if (curEnv.contains(n)) error(s"Variable $n already defined ($n = ${curEnv(n)}", vd.sourceContext.toOption) val bC = evalNode(ctx, curEnv, b) curEnv = curEnv + (n -> bC) From 8c27f23afc068aa8ca5afbcb669ffd2e7b010cb3 Mon Sep 17 00:00:00 2001 From: Alexander Slesarenko Date: Thu, 15 Oct 2020 20:01:20 +0300 Subject: [PATCH 16/19] aot-jit-switch.md added (v1) --- docs/aot-jit-switch.md | 161 +++++++++++++++++++++++++++++++++++++++++ 1 file changed, 161 insertions(+) create mode 100644 docs/aot-jit-switch.md diff --git a/docs/aot-jit-switch.md b/docs/aot-jit-switch.md new file mode 100644 index 0000000000..3b93d22559 --- /dev/null +++ b/docs/aot-jit-switch.md @@ -0,0 +1,161 @@ +## A protocol for AOT -> JIT switch via soft-fork + +### Definitions +Term | Description +-------------|------------ + _ScriptV1_ | The current version of ErgoTree (3.x releases) used in ErgoBlock v1. Bits 0-2 == 0 in the ErgoTree header byte. (see ErgoTree class). + _ScriptV2_ | The next version of ErgoTree (5.x releases) used after SF is activated. Bits 0-2 == 1 in the ErgoTree header byte. (see ErgoTree class). 
+ R4.0-AOT-cost | cost estimation using v4.0 Ahead-Of-Time costing implementation + R4.0-AOT-verify | spending condition verification using v4.0 Ahead-Of-Time interpreter implementation + skip-pool-tx | skip pool transaction when building a new block candidate + skip-accept | skip script evaluation (both costing and verification) and treat it as True proposition (accept spending) + skip-reject | skip script evaluation (both costing and verification) and treat it as False proposition (reject spending) + accept-overcosted | skip script verification and treat it as True proposition (accept spending) if cost is too high. + validation state | a tuple of (`Block Type`, `SF Status`, `Script Version`) + SF Status | soft-fork status of the block + +### Script Validation Rules Summary + +Validation of scripts in blocks is defined for each release and depend on validation +context which includes type of block, SF status and script version. We denote blocks being +created by miners as `candidate` and those distributed across network as `mined`. + +Thus, we have 8 different validation contexts multiplied by 2 node versions +having in total 16 validation rules as summarized in the following table. 
+ +Rule#| SF Status| Block Type| Script Version | Release | Validation Action +-----|----------|-----------|----------------|---------|-------- +1 | inactive | candidate | Script v1 | v4.0 | R4.0-AOT-cost, R4.0-AOT-verify +2 | inactive | candidate | Script v1 | v5.0 | R4.0-AOT-cost, R4.0-AOT-verify +3 | inactive | candidate | Script v2 | v4.0 | skip-pool-tx (cannot handle) +4 | inactive | candidate | Script v2 | v5.0 | skip-pool-tx (wait activation) +|||| +5 | inactive | mined | Script v1 | v4.0 | R4.0-AOT-cost, R4.0-AOT-verify +6 | inactive | mined | Script v1 | v5.0 | R4.0-AOT-cost, R4.0-AOT-verify +7 | inactive | mined | Script v2 | v4.0 | skip-reject (cannot handle) +8 | inactive | mined | Script v2 | v5.0 | skip-reject (wait activation) +|||| +9 | active | candidate | Script v1 | v4.0 | R4.0-AOT-cost, R4.0-AOT-verify +10 | active | candidate | Script v1 | v5.0 | R5.0-JIT-cost, R5.0-JIT-verify +11 | active | candidate | Script v2 | v4.0 | skip-pool-tx (cannot handle) +12 | active | candidate | Script v2 | v5.0 | R5.0-JIT-cost, R5.0-JIT-verify +|||| +13 | active | mined | Script v1 | v4.0 | R4.0-AOT-verify, accept-overcosted (rely on majority) +14 | active | mined | Script v1 | v5.0 | R5.0-JIT-cost, R5.0-JIT-verify +15 | active | mined | Script v2 | v4.0 | skip-accept (rely on majority) +16 | active | mined | Script v2 | v5.0 | R5.0-JIT-cost, R5.0-JIT-verify + +Observe the following properties of the validation rules. +1. Rules 1-4 mean the behaviour of v4.0 and v5.0 nodes is identical while creating new +block candidates before soft-fork. + +2. For any given tuple (`SF`, `Script Version`, `Release`) the same `ValidationAction` is +applied for both `candidate` and `mined` blocks. This proves the consistency of the rules +with respect to the change of the block status from `candidate` to `mined`. + +2. Each rule `i`, where `i` is an odd number, defines a `Validation Action` performed by +v4.0 nodes. 
Paired with it the `i+1` rule defines `Validation Action` performed by the +v5.0 nodes. Any such a pair `(i, i+1)` of rules have `Validation Actions` which are either +the same or equivalent with respect to the [Required properties of AOT and JIT +implementations](#required-properties-of-aot-and-jit-implementations). This proves +consistency of the validation actions across v4.0 and v5.0 nodes. + +3. After SF is activated (`SF Status == active`), both AOT-cost and AOT-verify +implementations are no longer used in `Validation Action`. This allow us to remove +AOT implementation in v5.0.1. To do that, we rely on Prop3 and update validation rules for +`mined` blocks by replacing rules 10, 12, 14, 16 with 17, 18, 19, 20 respectively. +See [the description of Rule 17](#rule-17) for details. + +Rule#| SF Status| Block Type| Script Version | Release | Validation Action +-----|-----------|----------|----------------|---------|-------- +17 | inactive | mined | Script v1 | v5.0.1 | R5.0-JIT-cost, R5.0-JIT-verify + +### Rule Descriptions + +#### Rules 1 and 2 + _Handle v1 script in candidate block when SF is not active._ + Ensured by _Prop1_ and _Prop2_ both v4.0 and v5.0 nodes use equivalent AOT-cost and + AOT-verify and thus have consensus. + Release v5.0 will contain both AOT and JIT versions simultaneously and thus can _behave as v4.0_ + before SF is activated and _behave as v5.0_ after SF activation. + +#### Rules 3 and 4 +_Both v4.0 and v5.0 nodes reject v2 scripts in new candidate blocks when SF is not active._ +This is ensured by _Prop5_ which is motivated by the following reasons: +- v4.0 nodes have no idea of v2 scripts, thus rejecting them altogether both in input and + output boxes of new candidate blocks +- v5.0 nodes are waiting for majority of nodes to vote, thus rejecting until SF is activated + +#### Rules 5 and 6 +These rules allow v1 scripts to enter blockchain even after SF is activated (for backward +compatibility with Apps). 
+In this case v4.0 node will _skip-accept_ v1 scripts in candidate blocks relying on the +majority. +Now, after SF is activated, the majority consist of v5.0 nodes and they will do +`R5.0-JIT-verify(Script v1)` which is equivalent to `R4.0-AOT-verify(Script v1)` due to +_Prop3_. + +One remark. A specific version of ErgoTree (in this case v1) assumes the fixed semantics of +all operations which doesn't depend on the interpreter implementation and we use this fact +to switch from `R4.0-AOT-verify` to `R5.0-JIT-verify` based implementation. + +However, for backward compatibility with Apps we DON'T NEED equivalence of costing, hence +exact cost estimation is not necessary. For this reason we have the relaxed condition in +_Prop4_, which means that any ScriptV1 admitted by `R4.0-AOT-cost` will also be admitted by +`R5.0-JIT-cost`. + +#### Rules 7 and 8 +After SF is activated v4.0 node `skip-accept` by relying on the majority of v5.0 +nodes, and also since it cannot handle v2 scripts. The majority of nodes uses new JIT +based implementation of ErgoTree interpreter: `R5.0-JIT-cost` procedure for costing and +`R5.0-JIT-verify` for verification. + +#### Rule 17 +The idea is to use `skip costing` action in Rule 17 which is applied to the old historical +blocks before the SF is activated. We assume it is safe to do for the following reasons: + - v5.0.1 node assumes the SF has already been activated so `inactive` block is somewhere + in the history. 
+ + +### Required properties of AOT and JIT implementations + +_Prop 1._ AOT-verify is preserved: `forall s:ScriptV1, R4.0-AOT-verify(s) == R5.0-AOT-verify(s)` + +_Prop 2._ AOT-cost is preserved: `forall s:ScriptV1, R4.0-AOT-cost(s) == R5.0-AOT-cost(s)` + +_Prop 3._ JIT-verify can replace AOT-verify: `forall s:ScriptV1, R5.0-JIT-verify(s) == R4.0-AOT-verify(s)` + +_Prop 4._ JIT-cost is bound by AOT-cost: `forall s:ScriptV1, R5.0-JIT-cost(s) <= R4.0-AOT-cost(s)` + +_Prop 5._ ScriptV2 is rejected before SF is active: +`forall s:ScriptV2, if not SF is active => R4.0-verify(s) == R5.0-verify(s) == Reject` + +### Rules + +1) We will increment script from v1 to v2 by changing _version bits_ in ErgoTree header +(now they have value 0, and will be 1). + +2) All the nodes should collectively (based on voting data in the blockchain) to start producing +v2 blocks and rejecting v1 block candidates (while accepting mined historical v1 blocks) + +2) The 4.0 node, after soft-fork 5.0 is activated will check the version, and if it is > 0 +it will accept the script without validation (at this point the majority of nodes will be +5.0) + +3) after soft-fork 5.0 is activated, all 5.0 nodes will execute v0 scripts using current +AOT based interpreter, and v1 script using JIT based interpreter + +4) before soft-fork is activated all nodes (4.0 and 5.0 releases) will execute only v0 +scripts using AOT based interpreter and will reject v1 scripts + +5) both v0 and v1 scripts will be supported by the network and applications may create +transactions with both v0 and v1 scripts. + +6) Since we are going to fix some bugs, the behaviour of v0 and v1 scripts in general not +required to be precisely equivalent. 
So while old apps are supported unchanged, new apps +are encouraged to use new ErgoScript frontend which will compile v1 scripts + +### Specification +#### Notes +- Also, on v0 and v2, it would be better to avoid changing semantics of existing ops, +deprecation old and introducing new ones is cleaner \ No newline at end of file From e3d87275004c3628ce5715e4d7abb1db060a2a43 Mon Sep 17 00:00:00 2001 From: Alexander Slesarenko Date: Fri, 16 Oct 2020 17:03:49 +0300 Subject: [PATCH 17/19] first draft version --- docs/aot-jit-switch.md | 193 +++++++++++++++++++++++------------------ 1 file changed, 108 insertions(+), 85 deletions(-) diff --git a/docs/aot-jit-switch.md b/docs/aot-jit-switch.md index 3b93d22559..71f57d0001 100644 --- a/docs/aot-jit-switch.md +++ b/docs/aot-jit-switch.md @@ -1,27 +1,38 @@ -## A protocol for AOT -> JIT switch via soft-fork +## The Protocol and Requirements to replace AOT with JIT via soft-fork + +The goal of this document is to specify requirements for v4.0, v5.0 and upcoming releases. +It also specifies rules of transaction validation in the Ergo network with respect of +soft-fork activation which should be followed by different versions of nodes. +The v4.x -> v5.x soft-fork is motivated by the goal of switching from AOT to JIT-based +costing algorithm and the simplified ErgoTree interpreter. ### Definitions -Term | Description --------------|------------ - _ScriptV1_ | The current version of ErgoTree (3.x releases) used in ErgoBlock v1. Bits 0-2 == 0 in the ErgoTree header byte. (see ErgoTree class). - _ScriptV2_ | The next version of ErgoTree (5.x releases) used after SF is activated. Bits 0-2 == 1 in the ErgoTree header byte. (see ErgoTree class). - R4.0-AOT-cost | cost estimation using v4.0 Ahead-Of-Time costing implementation +The text below we use the terms defined in the following table, please refer to it when +necessary. 
+ +Term | Description +-----------------|------------ + _ScriptV1_ | The current version of ErgoTree (3.x releases) used in ErgoBlock v1. Bits 0-2 == 0 in the ErgoTree header byte. (see ErgoTree class). + _ScriptV2_ | The next version of ErgoTree (5.x releases) used after SF is activated. Bits 0-2 == 1 in the ErgoTree header byte. (see ErgoTree class). + R4.0-AOT-cost | cost estimation using v4.0 Ahead-Of-Time costing implementation R4.0-AOT-verify | spending condition verification using v4.0 Ahead-Of-Time interpreter implementation + R5.0-JIT-verify | spending condition verification using v5.0 simplified interpreter with Just-In-Time costing of fullReduction and AOT sigma protocol costing. skip-pool-tx | skip pool transaction when building a new block candidate skip-accept | skip script evaluation (both costing and verification) and treat it as True proposition (accept spending) skip-reject | skip script evaluation (both costing and verification) and treat it as False proposition (reject spending) - accept-overcosted | skip script verification and treat it as True proposition (accept spending) if cost is too high. - validation state | a tuple of (`Block Type`, `SF Status`, `Script Version`) - SF Status | soft-fork status of the block + Validation Context | a tuple of (`Block Type`, `SF Status`, `Script Version`) + SF Status | soft-fork status of the block. The status is `active` when enough votes have been collected. ### Script Validation Rules Summary -Validation of scripts in blocks is defined for each release and depend on validation -context which includes type of block, SF status and script version. We denote blocks being -created by miners as `candidate` and those distributed across network as `mined`. +Validation of scripts in blocks is defined for each release and depend on _validation +context_ which includes type of block, soft-fork status and script version. 
We denote +blocks being created by miners as `candidate` and those distributed across network as +`mined`. Thus, we have 8 different validation contexts multiplied by 2 node versions -having in total 16 validation rules as summarized in the following table. +having in total 16 validation rules as summarized in the following table, which +specifies the action a node have to take in the given contexts. Rule#| SF Status| Block Type| Script Version | Release | Validation Action -----|----------|-----------|----------------|---------|-------- @@ -36,39 +47,50 @@ Rule#| SF Status| Block Type| Script Version | Release | Validation Action 8 | inactive | mined | Script v2 | v5.0 | skip-reject (wait activation) |||| 9 | active | candidate | Script v1 | v4.0 | R4.0-AOT-cost, R4.0-AOT-verify -10 | active | candidate | Script v1 | v5.0 | R5.0-JIT-cost, R5.0-JIT-verify +10 | active | candidate | Script v1 | v5.0 | R5.0-JIT-verify 11 | active | candidate | Script v2 | v4.0 | skip-pool-tx (cannot handle) -12 | active | candidate | Script v2 | v5.0 | R5.0-JIT-cost, R5.0-JIT-verify +12 | active | candidate | Script v2 | v5.0 | R5.0-JIT-verify |||| -13 | active | mined | Script v1 | v4.0 | R4.0-AOT-verify, accept-overcosted (rely on majority) -14 | active | mined | Script v1 | v5.0 | R5.0-JIT-cost, R5.0-JIT-verify +13 | active | mined | Script v1 | v4.0 | skip-accept (rely on majority) +14 | active | mined | Script v1 | v5.0 | R5.0-JIT-verify 15 | active | mined | Script v2 | v4.0 | skip-accept (rely on majority) -16 | active | mined | Script v2 | v5.0 | R5.0-JIT-cost, R5.0-JIT-verify - -Observe the following properties of the validation rules. -1. Rules 1-4 mean the behaviour of v4.0 and v5.0 nodes is identical while creating new -block candidates before soft-fork. +16 | active | mined | Script v2 | v5.0 | R5.0-JIT-verify -2. For any given tuple (`SF`, `Script Version`, `Release`) the same `ValidationAction` is -applied for both `candidate` and `mined` blocks. 
This proves the consistency of the rules -with respect to the change of the block status from `candidate` to `mined`. +Note the following properties of the validation rules. -2. Each rule `i`, where `i` is an odd number, defines a `Validation Action` performed by -v4.0 nodes. Paired with it the `i+1` rule defines `Validation Action` performed by the -v5.0 nodes. Any such a pair `(i, i+1)` of rules have `Validation Actions` which are either -the same or equivalent with respect to the [Required properties of AOT and JIT -implementations](#required-properties-of-aot-and-jit-implementations). This proves -consistency of the validation actions across v4.0 and v5.0 nodes. +1. Rules 1-4 specify creation of new candidate blocks _before_ soft-fork is activated. +They require that the behaviour of v4.0 and v5.0 nodes should be identical. -3. After SF is activated (`SF Status == active`), both AOT-cost and AOT-verify -implementations are no longer used in `Validation Action`. This allow us to remove -AOT implementation in v5.0.1. To do that, we rely on Prop3 and update validation rules for -`mined` blocks by replacing rules 10, 12, 14, 16 with 17, 18, 19, 20 respectively. -See [the description of Rule 17](#rule-17) for details. +2. Rules 9-10 specify creation of new candidate blocks _after_ soft-fork is activated. +They are different for v4.0 and v5.0 nodes, but +[equivalent](#equivalence-properties-of-validation-actions) with respect to preserving +consensus (see also [Rule Descriptions](#rule-descriptions) for details). -Rule#| SF Status| Block Type| Script Version | Release | Validation Action ------|-----------|----------|----------------|---------|-------- -17 | inactive | mined | Script v1 | v5.0.1 | R5.0-JIT-cost, R5.0-JIT-verify +3. For any given tuple (`SF Status`, `Script Version`, `Release`) the _equivalent_ `ValidationAction` is +applied for both `candidate` and `mined` blocks. 
This proves the consistency of the rules +with respect to the change of the block status from `candidate` to `mined`, both before +and after soft-fork activation. + +4. Each rule `i`, where `i` is an odd number, defines a `Validation Action` performed by +a v4.0 node. Each such rule is paired with the `i+1` rule which defines `Validation Action` +performed by a v5.0 node. Any such a pair `(i, i+1)` of rules have `Validation Actions` +which are either the same or equivalent with respect to the [Equivalence Properties of +Validation Actions](#equivalence-properties-of-validation-actions). This proves +consistency of validation actions across v4.0 and v5.0 nodes. + +5. After SF is activated (`SF Status == active`), both AOT-cost and AOT-verify +implementations are no longer used in `Validation Action` of v5.0 nodes. The only context +where v5.0 node needs to use AOT based verification is given by Rule 6, which is to verify +a v1 script in a historical mined block before SF is activated. +However relying on _Prop 3_ we can replace Rule 6 in a new v5.0.1 release with the +following _equivalent_ rule + +Rule#| SF Status | Block Type| Script Version | Release | Validation Action +-----|-----------|-----------|----------------|---------|-------- +17 | inactive | mined | Script v1 | v5.0.1 | R5.0-JIT-verify + +This will allow to remove AOT implementation in v5.0.1 and simplify reference +implementation significantly. ### Rule Descriptions @@ -80,44 +102,66 @@ Rule#| SF Status| Block Type| Script Version | Release | Validation Action before SF is activated and _behave as v5.0_ after SF activation. 
#### Rules 3 and 4 -_Both v4.0 and v5.0 nodes reject v2 scripts in new candidate blocks when SF is not active._ +_Both v4.0 and v5.0 nodes reject v2 scripts in new candidate blocks when SF is NOT active._ This is ensured by _Prop5_ which is motivated by the following reasons: -- v4.0 nodes have no idea of v2 scripts, thus rejecting them altogether both in input and - output boxes of new candidate blocks -- v5.0 nodes are waiting for majority of nodes to vote, thus rejecting until SF is activated +- v4.0 nodes have no implementation of v2 scripts, thus rejecting them altogether both in + input and output boxes of new candidate blocks +- v5.0 nodes behave like v4.0 nodes and are waiting for majority of nodes to vote, thus +rejecting until SF is activated #### Rules 5 and 6 + _Handle v1 script in mined block when SF is not active._ + Similar to rules 1 and 2 but for `mined` blocks. + +#### Rules 7 and 8 +_Both v4.0 and v5.0 nodes reject v2 scripts in `mined` blocks when SF is NOT active._ +Similar to rules 3 and 4. + +#### Rules 9 and 10 These rules allow v1 scripts to enter blockchain even after SF is activated (for backward compatibility with Apps). -In this case v4.0 node will _skip-accept_ v1 scripts in candidate blocks relying on the -majority. Now, after SF is activated, the majority consist of v5.0 nodes and they will do `R5.0-JIT-verify(Script v1)` which is equivalent to `R4.0-AOT-verify(Script v1)` due to _Prop3_. -One remark. A specific version of ErgoTree (in this case v1) assumes the fixed semantics of -all operations which doesn't depend on the interpreter implementation and we use this fact -to switch from `R4.0-AOT-verify` to `R5.0-JIT-verify` based implementation. +To understand this pair of rules it is important to remember that a specific version of +ErgoTree (in this case v1) assumes the fixed semantics of all operations. 
This however +doesn't restrict the interpreter implementations and we use this fact +to switch from `R4.0-AOT-verify` to `R5.0-JIT-verify` relying on their equivalence +property _Prop 3_. However, for backward compatibility with Apps we DON'T NEED equivalence of costing, hence exact cost estimation is not necessary. For this reason we have the relaxed condition in _Prop4_, which means that any ScriptV1 admitted by `R4.0-AOT-cost` will also be admitted by -`R5.0-JIT-cost`. +`R5.0-JIT-cost`. For this reason, the v4.0 based application interacting with v5.0 node +will not notice the difference. -#### Rules 7 and 8 -After SF is activated v4.0 node `skip-accept` by relying on the majority of v5.0 -nodes, and also since it cannot handle v2 scripts. The majority of nodes uses new JIT -based implementation of ErgoTree interpreter: `R5.0-JIT-cost` procedure for costing and -`R5.0-JIT-verify` for verification. +#### Rules 11 and 12 +After SF is activated v4.0 node cannot verify transactions containing v2 scripts, as +a result the v4.0 node cannot include such transactions in new `candidate` blocks. Thus it +performs `skip-pool-tx` action essentially using only those mempool transactions which it +can handle. Majority of network nodes (v5.0) will do `R5.0-JIT-verify` validation of v2 +scripts in `candidate` blocks. -#### Rule 17 -The idea is to use `skip costing` action in Rule 17 which is applied to the old historical -blocks before the SF is activated. We assume it is safe to do for the following reasons: - - v5.0.1 node assumes the SF has already been activated so `inactive` block is somewhere - in the history. +#### Rules 13 and 14 +After SF is activated v4.0 node `skip-accept` verification of `mined` blocks by relying on +the majority of v5.0, essentially not performing verification even of v1 scripts. 
The +majority of nodes uses new JIT based implementation of ErgoTree interpreter +`R5.0-JIT-verify` procedure for costing and verification, have consensus about blocks and +v4.0 nodes just accept all mined blocks created elsewhere. +#### Rules 15 and 16 +After SF is activated v4.0 node `skip-accept` of `mined` blocks by relying on the majority +of v5.0 nodes since it cannot handle v2 scripts. In the same context, the majority of +nodes uses the new JIT based implementation of ErgoTree interpreter `R5.0-JIT-verify` +procedure for costing and verification. -### Required properties of AOT and JIT implementations + +### Equivalence Properties of Validation Actions + +In order to guarantee network consensus in the presence of both v4.0 and v5.0 nodes +the implementation of `R4.0-AOT-verify`, `R5.0-AOT-verify`, `R4.0-AOT-cost`, +`R5.0-AOT-cost`, `R5.0-JIT-verify` should satisfy the following properties. _Prop 1._ AOT-verify is preserved: `forall s:ScriptV1, R4.0-AOT-verify(s) == R5.0-AOT-verify(s)` @@ -130,32 +174,11 @@ _Prop 4._ JIT-cost is bound by AOT-cost: `forall s:ScriptV1, R5.0-JIT-cost(s) <= _Prop 5._ ScriptV2 is rejected before SF is active: `forall s:ScriptV2, if not SF is active => R4.0-verify(s) == R5.0-verify(s) == Reject` -### Rules - -1) We will increment script from v1 to v2 by changing _version bits_ in ErgoTree header -(now they have value 0, and will be 1). 
- -2) All the nodes should collectively (based on voting data in the blockchain) to start producing -v2 blocks and rejecting v1 block candidates (while accepting mined historical v1 blocks) - -2) The 4.0 node, after soft-fork 5.0 is activated will check the version, and if it is > 0 -it will accept the script without validation (at this point the majority of nodes will be -5.0) - -3) after soft-fork 5.0 is activated, all 5.0 nodes will execute v0 scripts using current -AOT based interpreter, and v1 script using JIT based interpreter - -4) before soft-fork is activated all nodes (4.0 and 5.0 releases) will execute only v0 -scripts using AOT based interpreter and will reject v1 scripts - -5) both v0 and v1 scripts will be supported by the network and applications may create -transactions with both v0 and v1 scripts. +### Other Notes -6) Since we are going to fix some bugs, the behaviour of v0 and v1 scripts in general not +- Since we are going to fix some bugs, the behaviour of v1 and v2 scripts in general not required to be precisely equivalent. So while old apps are supported unchanged, new apps -are encouraged to use new ErgoScript frontend which will compile v1 scripts +are encouraged to use new ErgoScript frontend which will compile v2 scripts. 
- -### Specification -#### Notes -- Also, on v1 and v2, it would be better to avoid changing semantics of existing ops, +- Also, on v1 and v2, it would be better to avoid changing semantics of existing ops, deprecation old and introducing new ones is cleaner \ No newline at end of file From a23b07399e5f34787b3426ba2481ebf51af8066b Mon Sep 17 00:00:00 2001 From: Alexander Slesarenko Date: Mon, 19 Oct 2020 17:47:16 +0300 Subject: [PATCH 18/19] clarifications --- docs/aot-jit-switch.md | 27 +++++++++++++++++++-------- 1 file changed, 19 insertions(+), 8 deletions(-) diff --git a/docs/aot-jit-switch.md b/docs/aot-jit-switch.md index 71f57d0001..7bed588428 100644 --- a/docs/aot-jit-switch.md +++ b/docs/aot-jit-switch.md @@ -21,6 +21,7 @@ Term | Description skip-accept | skip script evaluation (both costing and verification) and treat it as True proposition (accept spending) skip-reject | skip script evaluation (both costing and verification) and treat it as False proposition (reject spending) Validation Context | a tuple of (`Block Type`, `SF Status`, `Script Version`) + Validation Action | an action taken by a node in the given validation context SF Status | soft-fork status of the block. The status is `active` when enough votes have been collected. ### Script Validation Rules Summary @@ -32,7 +33,7 @@ blocks being created by miners as `candidate` and those distributed across netwo Thus, we have 8 different validation contexts multiplied by 2 node versions having in total 16 validation rules as summarized in the following table, which -specifies the action a node have to take in the given contexts. +specifies the _validation action_ a node has to take in the given contexts. Rule#| SF Status| Block Type| Script Version | Release | Validation Action -----|----------|-----------|----------------|---------|-------- @@ -58,6 +59,11 @@ Rule#| SF Status| Block Type| Script Version | Release | Validation Action Note the following properties of the validation rules. +0.
Please note that block creation is not a part of the Ergo consensus protocol, and +miners can do whatever they want, in particular a completely custom block assembly. For +this reason, the rules for `candidate` blocks are _reference implementation_ only, and +nodes are not required to follow them exactly. + 1. Rules 1-4 specify creation of new candidate blocks _before_ soft-fork is activated. They require that the behaviour of v4.0 and v5.0 nodes should be identical. 2. Rules 9-10 specify creation of new candidate blocks _after_ soft-fork is activated. They are different for v4.0 and v5.0 nodes, but [equivalent](#equivalence-properties-of-validation-actions) with respect to preserving consensus (see also [Rule Descriptions](#rule-descriptions) for details). 3. For any given tuple (`SF Status`, `Script Version`, `Release`) the _equivalent_ `ValidationAction` is applied for both `candidate` and `mined` blocks. This proves the consistency of the rules with respect to the change of the block status from `candidate` to `mined`, both before and after soft-fork activation. 4. Each rule `i`, where `i` is an odd number, defines a `Validation Action` performed by a v4.0 node. Each such rule is paired with the `i+1` rule which defines `Validation Action` performed by a v5.0 node. Any such a pair `(i, i+1)` of rules have `Validation Actions` which are either the same or equivalent with respect to the [Equivalence Properties of Validation Actions](#equivalence-properties-of-validation-actions). This proves consistency of validation actions across v4.0 and v5.0 nodes. 5. After SF is activated (`SF Status == active`), both AOT-cost and AOT-verify implementations are no longer used in `Validation Action` of v5.0 nodes. The only context where v5.0 node needs to use AOT based verification is given by Rule 6, which is to verify a v1 script in a historical mined block before SF is activated. -However relying on _Prop 3_ we can replace Rule 6 in a new v5.0.1 release with the -following _equivalent_ rule +However relying on [Prop 3](#equivalence-properties-of-validation-actions) we can replace +Rule 6 in a new v5.0.1 release with the following _equivalent_ rule Rule#| SF Status | Block Type| Script Version | Release | Validation Action -----|-----------|-----------|----------------|---------|-------- 17 | inactive | mined | Script v1 | v5.0.1 | R5.0-JIT-verify This will allow to remove AOT implementation in v5.0.1 and simplify reference implementation significantly. ### Rule Descriptions @@ -80,44 +102,66 @@ Rule#| SF Status| Block Type| Script Version | Release | Validation Action before SF is activated and _behave as v5.0_ after SF activation.
+property [Prop 3](#equivalence-properties-of-validation-actions). -However, for backward compatibility with Apps we DON'T NEED equivalence of costing, hence +However, for backward compatibility with applications we DON'T NEED equivalence of costing, hence exact cost estimation is not necessary. For this reason we have the relaxed condition in _Prop4_, which means that any ScriptV1 admitted by `R4.0-AOT-cost` will also be admitted by `R5.0-JIT-cost`. For this reason, the v4.0 based application interacting with v5.0 node @@ -177,8 +183,13 @@ _Prop 5._ ScriptV2 is rejected before SF is active: ### Other Notes - Since we are going to fix some bugs, the behaviour of v1 and v2 scripts in general not -required to be precisely equivalent. So while old apps are supported unchanged, new apps -are encouraged to use new ErgoScript frontend which will compile v2 scripts. +required to be precisely equivalent. This is because `R5.0-JIT-verify` supports both v1 +and v2 scripts so that `R5.0-JIT-verify(Script_v1) == R4.0-AOT-verify(Script_v1)` due to +[Prop 1](#equivalence-properties-of-validation-actions) and `R5.0-JIT-verify(Script_v2)` +may implement an interpreter for a completely different language. Of course, we don't want it to be _completely_ +different, in particular to ease migration. +So while old apps are supported unchanged, new apps are encouraged to use new ErgoScript +frontend which will compile v2 scripts.
- Also, on v1 and v2, it would be better to avoid changing semantics of existing ops, deprecation old and introducing new ones is cleaner \ No newline at end of file From b8dd611888ad9764941093b36e6a640136e86ea8 Mon Sep 17 00:00:00 2001 From: Alexander Slesarenko Date: Tue, 27 Oct 2020 13:52:56 +0300 Subject: [PATCH 19/19] more optimizations: removed BigIntIsExactIntegral + ScalaDoc --- core/src/main/scala/scalan/Base.scala | 2 +- core/src/main/scala/scalan/Scalan.scala | 4 +- .../scala/scalan/primitives/NumericOps.scala | 7 +- .../main/scala/scalan/primitives/Thunks.scala | 130 ++++++++++++++++-- .../scala/sigmastate/eval/BigIntegerOps.scala | 18 ++- .../sigmastate/eval/RuntimeCosting.scala | 3 +- .../interpreter/InterpreterContext.scala | 8 +- 7 files changed, 143 insertions(+), 29 deletions(-) diff --git a/core/src/main/scala/scalan/Base.scala b/core/src/main/scala/scalan/Base.scala index 6f53457910..d151bbeb0c 100644 --- a/core/src/main/scala/scalan/Base.scala +++ b/core/src/main/scala/scalan/Base.scala @@ -237,7 +237,7 @@ abstract class Base { scalan: Scalan => override def mirror(t: Transformer): Ref[T] = self } - /** Describes lifting data values of type ST (Source Type) to IR nodes of the correspoding staged type T. + /** Describes lifting data values of type ST (Source Type) to IR nodes of the corresponding staged type T. * In general T is different type obtained by virtualization procedure from ST. * However ST can be the same as T as is the case for Byte, Int, String etc. */ diff --git a/core/src/main/scala/scalan/Scalan.scala b/core/src/main/scala/scalan/Scalan.scala index 9ce1599469..c2e9359261 100644 --- a/core/src/main/scala/scalan/Scalan.scala +++ b/core/src/main/scala/scalan/Scalan.scala @@ -1,8 +1,7 @@ package scalan -import scalan.compilation.GraphVizExport import scalan.primitives._ -import scalan.staged.{Transforming} +import scalan.staged.Transforming /** Aggregate cake with all inter-dependent modules assembled together. 
* Each instance of this class contains independent IR context, thus many @@ -30,7 +29,6 @@ class Scalan with Functions with IfThenElse with Transforming -// with GraphVizExport with Thunks with Entities with Modules diff --git a/core/src/main/scala/scalan/primitives/NumericOps.scala b/core/src/main/scala/scalan/primitives/NumericOps.scala index 5999ee10f2..deca140dc4 100644 --- a/core/src/main/scala/scalan/primitives/NumericOps.scala +++ b/core/src/main/scala/scalan/primitives/NumericOps.scala @@ -89,7 +89,12 @@ trait NumericOps extends Base { self: Scalan => } /** Descriptor of binary `%` operation (reminder of integral division). */ - case class IntegralMod[T](i: ExactIntegral[T])(implicit elem: Elem[T]) extends DivOp[T]("%", i) { +case class IntegralMod[T](i: ExactIntegral[T])(implicit elem: Elem[T]) extends DivOp[T]("%", i) { + /** Note, this is implemented using `ExactIntegral.rem` method which delegates to + * `scala.math.Integral.rem`. The later also implements `%` operator in Scala for + * numeric types. + * @see sigmastate.eval.NumericOps.BigIntIsIntegral + */ override def applySeq(x: T, y: T): T = i.rem(x, y) } diff --git a/core/src/main/scala/scalan/primitives/Thunks.scala b/core/src/main/scala/scalan/primitives/Thunks.scala index 6907b4a20d..323fd26042 100644 --- a/core/src/main/scala/scalan/primitives/Thunks.scala +++ b/core/src/main/scala/scalan/primitives/Thunks.scala @@ -10,51 +10,85 @@ import scalan.util.{Covariant, GraphUtil} import scala.collection.Seq -/** Slice in Scala cake with definitions of Thunk operations. */ +/** Slice in the [[Scalan]] cake with definitions of Thunk operations. + * See https://en.wikipedia.org/wiki/Thunk. + * Thunks are used to represent lazy operations in the graph IR. + * @see ApplyBinOpLazy, IfThenElseLazy + */ trait Thunks extends Functions with GraphVizExport { self: Scalan => type Th[+T] = Ref[Thunk[T]] + + /** Phantom type to define thunk-typed graph nodes and thunk based lazy operations. 
+    * Usually used inside [[Ref]], see for example [[Th]].
+    * See also for details http://gigiigig.github.io/tlp-step-by-step/phantom-types.html
+    */
   trait Thunk[+A] { def value: A }
+
+  /** A class of factory to create new Thunks by using `Thunk { ... }` expressions. */
   class ThunkCompanion {
     def apply[T](block: => Ref[T]) = thunk_create(block)
     def forced[T](block: => Ref[T]) = thunk_create(block).force
   }
+
+  /** Allow expressions like `Thunk { ... }` to create new Thunks. */
   val Thunk: ThunkCompanion = new ThunkCompanion
 
+  /** Extension methods on `Ref[Thunk[T]]` values. */
   implicit class RepThunkOps[T](t: Th[T]) {
+    /** Forces evaluation of the thunk to produce the delayed value. */
     def force() = thunk_force(t)
+
+    /** Creates a new thunk which, when forced, in turn forces `t` and then maps the resulting
+      * value using `f`. The application of `f` may be inlined to the new thunk body, or
+      * may be reified as [[Apply]] node, this depends on parameters of Lambda node.
+      *
+      * @param f reference to graph node of type [[Lambda]]
+      */
     def map[R](f: Ref[T => R]): Th[R] = thunk_map(t, f)
+
+    /** Creates a new thunk which, when forced, in turn forces `t` and then maps the resulting
+      * value using `f`.
+      * @param f scala function which is always inlined (staged) into the new thunk body
+      */
     def map[R](f: Ref[T] => Ref[R]): Th[R] = thunk_map1(t, f)
   }
 
+  /** Thunk is an instance of container type class [[Cont]].
*/ implicit val thunkCont: Cont[Thunk] = new Cont[Thunk] { - def lift[T](implicit eT: Elem[T]) = element[Thunk[T]] - def unlift[T](implicit eFT: Elem[Thunk[T]]) = eFT.eItem - def unapply[T](e: Elem[_]) = e match { + override def lift[T](implicit eT: Elem[T]) = element[Thunk[T]] + override def unlift[T](implicit eFT: Elem[Thunk[T]]) = eFT.eItem + override def unapply[T](e: Elem[_]) = e match { case e: ThunkElem[_] => Some(asElem[Thunk[T]](e)) case _ => None } } import Liftables._ - import scala.reflect.{ClassTag, classTag} + + /** Runtime representation of lazy values. + * Each [[Thunk]] typed graph node evaluates to a value of this type. + */ type SThunk[T] = () => T + /** Graph node to represent constants of type [[Thunk]]. + * @see scalan.Base.Liftables.Liftable */ case class ThunkConst[ST, T](constValue: SThunk[ST], lT: Liftable[ST, T]) extends BaseDef[Thunk[T]]()(thunkElement(lT.eW)) with LiftedConst[SThunk[ST], Thunk[T]] { val liftable: Liftable[SThunk[ST], Thunk[T]] = liftableThunk(lT) } + /** Implementation of Liftable type class for `Thunk[T]` given liftable for `T`. */ case class LiftableThunk[ST, T](lT: Liftable[ST, T]) extends Liftable[SThunk[ST], Thunk[T]] { import RType._ - def eW: Elem[Thunk[T]] = thunkElement(lT.eW) - def sourceType: RType[SThunk[ST]] = { + override def eW: Elem[Thunk[T]] = thunkElement(lT.eW) + override def sourceType: RType[SThunk[ST]] = { implicit val tST = lT.sourceType RType[SThunk[ST]] } - def lift(x: SThunk[ST]): Ref[Thunk[T]] = ThunkConst(x, lT) - def unlift(w: Ref[Thunk[T]]): SThunk[ST] = w.node match { + override def lift(x: SThunk[ST]): Ref[Thunk[T]] = ThunkConst(x, lT) + override def unlift(w: Ref[Thunk[T]]): SThunk[ST] = w.node match { case ThunkConst(x: SThunk[_], l) if l == lT => x.asInstanceOf[SThunk[ST]] case _ => unliftError(w) } @@ -64,6 +98,7 @@ trait Thunks extends Functions with GraphVizExport { self: Scalan => LiftableThunk(lT) + /** Implements a type descriptor of `Thunk[A]` type given the instance of `A`. 
*/ case class ThunkElem[A](override val eItem: Elem[A]) extends EntityElem1[A, Thunk[A], Thunk](eItem, container[Thunk]) { override lazy val liftable = asLiftable[SThunk[_], Thunk[A]](liftableThunk(eItem.liftable)) @@ -72,14 +107,24 @@ trait Thunks extends Functions with GraphVizExport { self: Scalan => implicit def thunkElement[T](implicit eItem: Elem[T]): Elem[Thunk[T]] = cachedElemByClass(eItem)(classOf[ThunkElem[T]]) + + /** Implicit conversion (downcast) to access `ThunkElem.eItem` field. */ implicit def extendThunkElement[T](elem: Elem[Thunk[T]]): ThunkElem[T] = elem.asInstanceOf[ThunkElem[T]] + /** Graph node representing thunk with reified body. + * Each thunk node is a specialized implementation of AstGraph abstract class. + * @param _scheduleIds compact representation of thunk body, i.e. a sequence of graph + * nodes, which will be executed when the thunk is forced. Each + * node is given by its id. The node can be resolved using + * `getSym(id).node` expression. + * @param root graph node, which represents the result value of thunk forcing. + */ class ThunkDef[A](val root: Ref[A], _scheduleIds: => ScheduleIds) extends AstGraph with Def[Thunk[A]] { implicit def eA: Elem[A] = root.elem private var _selfType: Elem[Thunk[A]] = _ - def resultType: Elem[Thunk[A]] = + override def resultType: Elem[Thunk[A]] = if (_selfType != null) _selfType else { val res = thunkElement(eA) @@ -95,7 +140,7 @@ trait Thunks extends Functions with GraphVizExport { self: Scalan => * different nodeIds and hence they are NOT equal. 
* */ override lazy val hashCode: Int = _nodeId //41 * (41 + root.hashCode) + schedule.hashCode - def canEqual(other: Any) = other.isInstanceOf[ThunkDef[_]] + override def canEqual(other: Any) = other.isInstanceOf[ThunkDef[_]] override def equals(other: Any) = other match { case that: ThunkDef[_] => _nodeId == that._nodeId @@ -105,15 +150,15 @@ trait Thunks extends Functions with GraphVizExport { self: Scalan => // Product implementation - def productElement(n: Int): Any = n match { + override def productElement(n: Int): Any = n match { case 0 => root case _ => throw new NoSuchElementException(s"ThunkDef.productElement($n) is undefined") } - def productArity: Int = 1 + override def productArity: Int = 1 override def boundVars = EmptySeqOfSym - val roots: Seq[Sym] = Array(root) + override val roots: Seq[Sym] = Array(root) override lazy val freeVars: Seq[Sym] = if (schedule.isEmpty) roots else super.freeVars @@ -129,18 +174,27 @@ trait Thunks extends Functions with GraphVizExport { self: Scalan => } } + /** Helper object to handle construction of nested thunks. One instance is created for + * each ThunkDef under construction. This corresponds to syntactic nesting of thunks. + * @param parent the scope of the parent thunk + * @param thunkSym reference to the Graph node for which this scope is created. + */ class ThunkScope(val parent: ThunkScope, val thunkSym: Ref[Any]) { private val bodyIds: DSet[Int] = DSet.ofSize(16) private val bodyDefs: AVHashMap[Def[_], Def[_]] = AVHashMap(32) @inline final def isEmptyBody: Boolean = bodyIds.isEmpty + /** Add the given graph node (by symbol) to this scope. */ def +=(sym: Sym): Unit = { val d = sym.node bodyIds += d.nodeId bodyDefs.put(d, d) } + /** Sort graph nodes of this scope topologically using depth-first search along + * node dependencies (graph edges). This will give evaluation order of the thunk. 
+ */ def scheduleForResult(root: Ref[Any]): DBuffer[Int] = { val sch = GraphUtil.depthFirstOrderFrom( DBuffer(root.node.nodeId), @@ -159,6 +213,11 @@ trait Thunks extends Functions with GraphVizExport { self: Scalan => sch } + /** Find the given node among definitions accessible from this scope. + * It searches in the chain of nested scopes for the first match. + * If not found, searches in the global scope. + * @return null if not found at all. + */ def findDef[T](d: Def[T]): Ref[T] = { val existingOpt = bodyDefs.get(d) if (existingOpt.isDefined) return existingOpt.get.self.asInstanceOf[Ref[T]] @@ -169,27 +228,49 @@ trait Thunks extends Functions with GraphVizExport { self: Scalan => } } + /** The stack of nested thunks during graph construction. */ class ThunkStack { + /** Stack is represented as simple list with th first element as a top of the stack. */ var stack = List[ThunkScope]() + + /** @return optional top scope. */ @inline def top: Nullable[ThunkScope] = if (stack.isEmpty) Nullable.None else Nullable(stack.head) + + /** Push new scope when start constructing new thunk. */ def push(e: ThunkScope): this.type = { stack = e :: stack; this } + + /** Pop new scope when thunk has been constructed. */ @inline def pop: ThunkScope = { val res = stack.head stack = stack.tail res } + + /** For a given thunk node, create a new scope and push it on the stack. */ def beginScope(thunkSym: Ref[Any]): ThunkScope = { val parent = if (stack.isEmpty) null else stack.head val scope = new ThunkScope(parent, thunkSym) this.push(scope) scope } + + /** End current scope and pop it from the stack. */ @inline def endScope(): Unit = { this.pop } } + protected val thunkStack = new ThunkStack implicit def repToThunk[A](block: Ref[A]): Ref[Thunk[A]] = thunk_create(block) + /** Constructs a new thunk node by executing the given `block` and collecting all the + * graph node created along the way. 
+ * This methods: + * 1) starts a new nested ThunkScope, + * 2) executes the `block` to obtain resulting graph node + * 3) schedule thunk body for execution order + * 4) adds a new ThunkDef node and returns its reference. + * @return a reference to the newly created [[ThunkDef]] node + */ def thunk_create[A](block: => Ref[A]): Ref[Thunk[A]] = { var scheduleIds: ScheduleIds = null val resPH = placeholder(Lazy(AnyElement)).asInstanceOf[Ref[A]] // will be known after block is evaluated @@ -222,18 +303,38 @@ trait Thunks extends Functions with GraphVizExport { self: Scalan => } } + /** Specifies thunk staging strategy with respect to handling thunk_force operation. + * @see thunk_force + */ var isInlineThunksOnForce = false + /** Inlines the given thunk by cloning all its nodes and applying the given substitution (transformer). + * @param thunk reference to the thunk node + * @param subst transformer to be applied for each mirrored (cloned) node. + * @return the reference to the graph node, which represents the resulting value of the thunk + */ def forceThunkByMirror[A](thunk: Th[A], subst: MapTransformer = MapTransformer.empty()): Ref[A] = { val th = thunk.node.asInstanceOf[ThunkDef[A]] forceThunkDefByMirror(th, subst) } + + /** Inlines the given thunk by cloning all its nodes and applying the given substitution (transformer). + * @param th the thunk node + * @param subst transformer to be applied for each mirrored (cloned) node. + * @return the reference to the graph node, which represents the resulting value of the thunk + */ def forceThunkDefByMirror[A](th: ThunkDef[A], subst: MapTransformer = MapTransformer.empty()): Ref[A] = { val body = th.scheduleIds val t = DefaultMirror.mirrorSymbols(subst, NoRewriting, th, body) t(th.root) } + /** Logical force of the thunk. Depending on isInlineThunksOnForce it either inlines the + * thunk body or creates a new ThunkForce node. 
+ * + * @return a reference to the graph node, which represent the result of the thunk's + * evaluation. + */ def thunk_force[A](t: Th[A]): Ref[A] = if (isInlineThunksOnForce) t.node match { @@ -244,6 +345,7 @@ trait Thunks extends Functions with GraphVizExport { self: Scalan => else ThunkForce(t) + /** Graph node to represent thunk forcing operation. */ case class ThunkForce[A](thunk: Ref[Thunk[A]]) extends Def[A] { implicit def resultType = thunk.elem.eItem override def transform(t: Transformer) = ThunkForce(t(thunk)) diff --git a/sigmastate/src/main/scala/sigmastate/eval/BigIntegerOps.scala b/sigmastate/src/main/scala/sigmastate/eval/BigIntegerOps.scala index 547092bad6..799382cf10 100644 --- a/sigmastate/src/main/scala/sigmastate/eval/BigIntegerOps.scala +++ b/sigmastate/src/main/scala/sigmastate/eval/BigIntegerOps.scala @@ -53,6 +53,17 @@ object NumericOps { def toFloat(x: BigInt): Float = CostingSigmaDslBuilder.toBigInteger(x).floatValue() def toDouble(x: BigInt): Double = CostingSigmaDslBuilder.toBigInteger(x).doubleValue() } + + /** The instance of Integral for BigInt. + * + * Note: ExactIntegral is not defined for [[special.sigma.BigInt]]. + * This is because arithmetic BigInt operations are handled specially + * (see `case op: ArithOp[t] if op.tpe == SBigInt =>` in RuntimeCosting.scala). + * As result [[scalan.primitives.UnBinOps.ApplyBinOp]] nodes are not created for BigInt + * operations, and hence operation descriptors such as + * [[scalan.primitives.NumericOps.IntegralDivide]] and + * [[scalan.primitives.NumericOps.IntegralMod]] are not used for BigInt. 
+ */ implicit object BigIntIsIntegral extends BigIntIsIntegral with OrderingOps.BigIntOrdering implicit object BigIntIsExactNumeric extends ExactNumeric[BigInt] { @@ -62,13 +73,6 @@ object NumericOps { override def times(x: BigInt, y: BigInt): BigInt = n.times(x, y) } - implicit object BigIntIsExactIntegral extends ExactIntegral[BigInt] { - val n = BigIntIsIntegral - override def plus(x: BigInt, y: BigInt): BigInt = n.plus(x, y) - override def minus(x: BigInt, y: BigInt): BigInt = n.minus(x, y) - override def times(x: BigInt, y: BigInt): BigInt = n.times(x, y) - } - implicit object BigIntIsExactOrdering extends ExactOrderingImpl[BigInt](BigIntIsIntegral) } diff --git a/sigmastate/src/main/scala/sigmastate/eval/RuntimeCosting.scala b/sigmastate/src/main/scala/sigmastate/eval/RuntimeCosting.scala index a568c25f82..02124bac5d 100644 --- a/sigmastate/src/main/scala/sigmastate/eval/RuntimeCosting.scala +++ b/sigmastate/src/main/scala/sigmastate/eval/RuntimeCosting.scala @@ -903,8 +903,7 @@ trait RuntimeCosting extends CostingRules { IR: IRContext => (ByteElement, ByteIsExactIntegral), (ShortElement, ShortIsExactIntegral), (IntElement, IntIsExactIntegral), - (LongElement, LongIsExactIntegral), - (bigIntElement, BigIntIsExactIntegral) + (LongElement, LongIsExactIntegral) ) private lazy val elemToExactOrderingMap = Map[Elem[_], ExactOrdering[_]]( (ByteElement, ByteIsExactOrdering), diff --git a/sigmastate/src/main/scala/sigmastate/interpreter/InterpreterContext.scala b/sigmastate/src/main/scala/sigmastate/interpreter/InterpreterContext.scala index ebe83519b0..5965c533e1 100644 --- a/sigmastate/src/main/scala/sigmastate/interpreter/InterpreterContext.scala +++ b/sigmastate/src/main/scala/sigmastate/interpreter/InterpreterContext.scala @@ -64,6 +64,12 @@ trait InterpreterContext { /** Creates a new instance with given validation settings. 
    */
   def withValidationSettings(newVs: SigmaValidationSettings): InterpreterContext
 
-  /** Creates `special.sigma.Context` instance based on this context. */
+  /** Creates `special.sigma.Context` instance based on this context. The created instance
+    * contains all data represented using types from [[special.sigma]] package.
+    * These types are used internally by ErgoTree interpreter.
+    * Thus, this method performs transformation from Ergo to internal Sigma representation
+    * of all context data.
+    * @see sigmastate.eval.Evaluation
+    */
   def toSigmaContext(isCost: Boolean, extensions: Map[Byte, AnyValue] = Map()): sigma.Context
 }