Merge pull request #717 from ScorexFoundation/develop
Release v4.0.2
aslesarenko authored Mar 11, 2021
2 parents 7cea39a + e093a0f commit fada073
Showing 40 changed files with 845 additions and 178 deletions.
5 changes: 0 additions & 5 deletions build.sbt
@@ -9,11 +9,6 @@ name := "sigma-state"
lazy val scala212 = "2.12.10"
lazy val scala211 = "2.11.12"

javacOptions ++=
"-source" :: "1.7" ::
"-target" :: "1.7" ::
Nil

lazy val allConfigDependency = "compile->compile;test->test"

lazy val commonSettings = Seq(
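Note on the build.sbt change: the commit removes the global javacOptions that pinned javac to Java 1.7 source/target. If a pin were ever needed again it could be scoped to a single subproject; the fragment below is a hypothetical sbt sketch, not part of this commit, and the project name and version numbers are placeholders.

// Hypothetical build.sbt fragment (not in this commit): re-introduce a javac
// source/target pin for one subproject instead of the whole build.
lazy val exampleModule = (project in file("example-module"))
  .settings(
    javacOptions ++= Seq("-source", "1.8", "-target", "1.8")
  )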
2 changes: 1 addition & 1 deletion common/src/main/scala/scalan/util/Extensions.scala
@@ -13,7 +13,7 @@ object Extensions {
def toByte: Byte = if (b) 1 else 0
}

/** @hotspot it is used in deserialization so we avoid allocation by any means. */
/** HOTSPOT: it is used in deserialization so we avoid allocation by any means. */
@inline final def toUByte(b: Byte) = b & 0xFF

implicit class ByteOps(val b: Byte) extends AnyVal {
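The Extensions.scala hunk above only reworks the HOTSPOT tag on toUByte, which masks a signed Byte into its unsigned 0..255 value without boxing. A minimal self-contained sketch of why the mask matters; object and variable names are mine, only the masking expression comes from the diff.

object ToUByteDemo {
  // Same masking as Extensions.toUByte: widen a signed Byte to its 0..255 value.
  @inline final def toUByte(b: Byte): Int = b & 0xFF

  def main(args: Array[String]): Unit = {
    val b: Byte = -1        // bit pattern 0xFF
    println(b.toInt)        // -1, sign-extending widening
    println(toUByte(b))     // 255, the unsigned interpretation used by the deserializer
  }
}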
6 changes: 3 additions & 3 deletions core/src/main/scala/scalan/Base.scala
@@ -538,7 +538,7 @@ abstract class Base { scalan: Scalan =>
/** Transforms this object into new one by applying `t` to every Ref inside
* its structure. The structure is built out of Seq, Array, Option and Def values.
* Other structure items remain unchanged and copied to the new instance.
* @hotspot don't beautify the code */
* HOTSPOT: don't beautify the code */
protected def transformProductParam(x: Any, t: Transformer): Any = x match {
case (_: UnOp[_, _]) | (_: BinOp[_, _]) =>
// allows use of context bounds in classes extending UnOp/BinOp.
@@ -687,7 +687,7 @@ abstract class Base { scalan: Scalan =>
/** Create or find symbol (node Ref) which refers to the given node in the table of all created symbols.
* The d.nodeId is the index in the _symbolTable which is DBuffer (backed by Array)
* @return new or existing symbol
* @hotspot the method should be allocation-free (make sure of it by examining the generated Java code)
* HOTSPOT: the method should be allocation-free (make sure of it by examining the generated Java code)
*/
final def updateSymbolTable[T](s: Ref[T], d: Def[T]): Ref[T] = {
val id = d.nodeId
@@ -769,7 +769,7 @@ abstract class Base { scalan: Scalan =>
* @param d node to be added to the head of nodes
* @param newSym producer of the reference to be used as the reference to `d` node.
* @return a reference to the `d` node in the heap
* @hotspot */
* HOTSPOT: */
def findOrCreateDefinition[T](d: Def[T], newSym: => Ref[T]): Ref[T] = {
val optScope = thunkStack.top
var sym = optScope match {
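The Base.scala hunks annotate updateSymbolTable and findOrCreateDefinition as allocation-free hot paths; the node id doubles as the index into the array-backed symbol table. Below is a standalone model of that idea, assuming a growable ArrayBuffer in place of the project's DBuffer; class and method names are illustrative, not the project's.

import scala.collection.mutable.ArrayBuffer

// Standalone model of the updateSymbolTable idea: a node's id is used directly
// as an index into a growable table of symbols, so lookup is O(1) and the
// common path (symbol already registered) allocates nothing.
final class SymbolTable[S >: Null <: AnyRef] {
  private val table = ArrayBuffer.empty[S]

  /** Returns the symbol already registered under `id`, or registers `sym`. */
  def updateSymbol(id: Int, sym: S): S = {
    while (table.length <= id) table += null   // grow the table up to the id
    val existing = table(id)
    if (existing ne null) existing
    else { table(id) = sym; sym }
  }
}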
8 changes: 4 additions & 4 deletions core/src/main/scala/scalan/staged/AstGraphs.scala
@@ -49,7 +49,7 @@ trait AstGraphs extends Transforming { self: Scalan =>
* If the graph represents a compound definition (Lambda, Thunk etc),
* then each item in `freeVars` is used in the body, but not part of it.
* Intersection of free vars with bound vars is empty.
* @hotspot don't beautify the code
* HOTSPOT: don't beautify the code
*/
def freeVars: Seq[Sym] = {
val sch = schedule.toArray
@@ -89,7 +89,7 @@ trait AstGraphs extends Transforming { self: Scalan =>
def scheduleIds: DBuffer[Int]

/** Sequence of node references forming a schedule.
* @hotspot don't beautify the code */
* HOTSPOT: don't beautify the code */
lazy val schedule: Schedule = {
val ids = scheduleIds
val len = ids.length
@@ -155,7 +155,7 @@ trait AstGraphs extends Transforming { self: Scalan =>

/** Build usage information induced by the given schedule.
* For each symbol of the schedule a GraphNode is created and usages are collected.
* @hotspot don't beautify the code
* HOTSPOT: don't beautify the code
*/
def buildUsageMap(schedule: Schedule, usingDeps: Boolean): DMap[Int, GraphNode] = {
val len = schedule.length
@@ -203,7 +203,7 @@ trait AstGraphs extends Transforming { self: Scalan =>

def hasManyUsagesGlobal(s: Sym): Boolean = globalUsagesOf(s).length > 1

/** @hotspot for performance we return mutable structure, but it should never be changed. */
/** HOTSPOT: for performance we return mutable structure, but it should never be changed. */
def usagesOf(id: Int): DBuffer[Int] = {
val node = usageMap.getOrElse(id, null)
if (node == null) return emptyDBufferOfInt
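The usagesOf accessor above returns a mutable DBuffer and, on a miss, a shared empty instance, trading immutability guarantees for zero allocation. An illustrative sketch of the same convention using plain collections follows; all names are mine, not the project's.

import scala.collection.mutable.ArrayBuffer

// Illustrative sketch of the usagesOf convention: on a miss the method returns
// one pre-allocated, shared empty buffer instead of allocating a fresh one,
// so callers must treat the result as read-only.
object UsageIndexDemo {
  private val emptyUsages = ArrayBuffer.empty[Int]   // shared, never mutated

  private val usageMap: Map[Int, ArrayBuffer[Int]] = Map(1 -> ArrayBuffer(2, 3))

  def usagesOf(id: Int): ArrayBuffer[Int] = usageMap.getOrElse(id, emptyUsages)

  def main(args: Array[String]): Unit = {
    println(usagesOf(1))    // ArrayBuffer(2, 3)
    println(usagesOf(42))   // ArrayBuffer() -- the shared instance
  }
}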
6 changes: 3 additions & 3 deletions core/src/main/scala/scalan/staged/Transforming.scala
@@ -71,7 +71,7 @@ trait Transforming { self: Scalan =>
}

/** Concrete and default implementation of Transformer using underlying HashMap.
* @hotspot don't beautify the code */
* HOTSPOT: don't beautify the code */
case class MapTransformer(private val subst: util.HashMap[Sym, Sym]) extends Transformer {
def this(substPairs: (Sym, Sym)*) {
this({
@@ -145,7 +145,7 @@ trait Transforming { self: Scalan =>

/** Base class for mirrors of graph nodes. Provides default implementations which can be
* overriden if special logic is required.
* @hotspot don't beautify the code */
* HOTSPOT: don't beautify the code */
abstract class Mirror {
def apply[A](t: Transformer, rewriter: Rewriter, node: Ref[A], d: Def[A]): Sym = d.mirror(t)

@@ -246,7 +246,7 @@ trait Transforming { self: Scalan =>
}
}

/** @hotspot */
/** HOTSPOT: */
def mirrorSymbols(t0: Transformer, rewriter: Rewriter, g: AstGraph, nodes: DBuffer[Int]) = {
var t: Transformer = t0
cfor(0)(_ < nodes.length, _ + 1) { i =>
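MapTransformer above keeps its substitution in a mutable java.util.HashMap so lookups during mirrorSymbols stay cheap. A minimal model of that idea follows, under the assumption that an unmapped symbol maps to itself; the real Transformer trait is not reproduced here and the class name is mine.

import java.util

// Minimal model of the MapTransformer idea: symbol-to-symbol substitution
// backed by a mutable java.util.HashMap, where an unmapped symbol maps to itself.
final class Subst[Sym <: AnyRef](private val subst: util.HashMap[Sym, Sym]) {
  def this() = this(new util.HashMap[Sym, Sym]())

  def apply(s: Sym): Sym = {
    val mapped = subst.get(s)       // null when `s` has no substitution
    if (mapped eq null) s else mapped
  }

  def add(from: Sym, to: Sym): this.type = { subst.put(from, to); this }
}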
6 changes: 3 additions & 3 deletions sigmastate/src/main/scala/org/ergoplatform/ErgoBox.scala
@@ -46,7 +46,7 @@ import scala.runtime.ScalaRunTime
* @param creationHeight - height when a transaction containing the box was created.
* This height is declared by user and should not exceed height of the block,
* containing the transaction with this box.
* @hotspot don't beautify the code of this class
* HOTSPOT: don't beautify the code of this class
*/
class ErgoBox(
override val value: Long,
@@ -149,7 +149,7 @@ object ErgoBox {

val maxRegisters: Int = SigmaConstants.MaxRegisters.value

/** @hotspot don't beautify the code in this companion */
/** HOTSPOT: don't beautify the code in this companion */
private val _mandatoryRegisters: Array[MandatoryRegisterId] = Array(R0, R1, R2, R3)
val mandatoryRegisters: Seq[MandatoryRegisterId] = _mandatoryRegisters

@@ -169,7 +169,7 @@ object ErgoBox {

val registerByName: Map[String, RegisterId] = allRegisters.map(r => s"R${r.number}" -> r).toMap

/** @hotspot called from ErgoBox serializer */
/** HOTSPOT: called from ErgoBox serializer */
@inline final def registerByIndex(index: Int): RegisterId = allRegisters(index)

def findRegisterByIndex(i: Int): Option[RegisterId] =
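The ErgoBox hunks keep the registers in a plain Array so that registerByIndex, which the serializer calls for every register, is a constant-time array access. Below is a hypothetical standalone mirror of that lookup pattern; the RegisterId hierarchy here is a stub, not the real one.

// Hypothetical mirror of the register-lookup pattern: ids live in a plain
// Array, so registerByIndex is a bounds-checked array read and nothing more.
object RegisterLookupDemo {
  sealed abstract class RegisterId(val number: Byte) {
    override def toString: String = s"R$number"
  }
  case object R0 extends RegisterId(0)
  case object R1 extends RegisterId(1)
  case object R2 extends RegisterId(2)
  case object R3 extends RegisterId(3)

  private val allRegisters: Array[RegisterId] = Array(R0, R1, R2, R3)

  @inline final def registerByIndex(index: Int): RegisterId = allRegisters(index)

  def findRegisterByIndex(i: Int): Option[RegisterId] =
    if (0 <= i && i < allRegisters.length) Some(allRegisters(i)) else None

  def main(args: Array[String]): Unit = {
    println(registerByIndex(2))      // R2
    println(findRegisterByIndex(9))  // None
  }
}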
40 changes: 22 additions & 18 deletions sigmastate/src/main/scala/org/ergoplatform/ErgoBoxCandidate.scala
@@ -4,7 +4,6 @@ import java.util

import org.ergoplatform.ErgoBox._
import org.ergoplatform.settings.ErgoAlgos
import scorex.crypto.hash.Digest32
import scorex.util.{bytesToId, ModifierId}
import sigmastate.Values._
import sigmastate._
@@ -115,7 +114,7 @@ object ErgoBoxCandidate {
*/
val UndefinedBoxRef: Coll[Byte] = Array.fill(34)(0: Byte).toColl

/** @hotspot don't beautify the code */
/** HOTSPOT: don't beautify the code */
object serializer extends SigmaSerializer[ErgoBoxCandidate, ErgoBoxCandidate] {

/** Helper method for [[ErgoBoxCandidate]] serialization.
@@ -180,28 +179,31 @@ object ErgoBoxCandidate {
/** Helper method to parse [[ErgoBoxCandidate]] previously serialized by
* [[serializeBodyWithIndexedDigests()]].
*/
def parseBodyWithIndexedDigests(digestsInTx: Option[Coll[TokenId]], r: SigmaByteReader): ErgoBoxCandidate = {
def parseBodyWithIndexedDigests(digestsInTx: Array[Array[Byte]], r: SigmaByteReader): ErgoBoxCandidate = {
val previousPositionLimit = r.positionLimit
r.positionLimit = r.position + ErgoBox.MaxBoxSize
val value = r.getULong() // READ
val tree = DefaultSerializer.deserializeErgoTree(r, SigmaSerializer.MaxPropositionSize) // READ
val creationHeight = r.getUInt().toInt // READ
val nTokens = r.getUByte() // READ
val tokenIds = new Array[Digest32](nTokens)
val tokenIds = new Array[Array[Byte]](nTokens)
val tokenAmounts = new Array[Long](nTokens)
val tokenIdSize = TokenId.size
cfor(0)(_ < nTokens, _ + 1) { i =>
val tokenId = if (digestsInTx.isDefined) {
val digestIndex = r.getUInt().toInt // READ
val digests = digestsInTx.get
if (!digests.isDefinedAt(digestIndex)) sys.error(s"failed to find token id with index $digestIndex")
digests(digestIndex)
} else {
r.getBytes(tokenIdSize) // READ
if (digestsInTx != null) {
val nDigests = digestsInTx.length
cfor(0)(_ < nTokens, _ + 1) { i =>
val digestIndex = r.getUInt().toInt // READ
if (digestIndex < 0 || digestIndex >= nDigests)
sys.error(s"failed to find token id with index $digestIndex")
val amount = r.getULong() // READ
tokenIds(i) = digestsInTx(digestIndex)
tokenAmounts(i) = amount
}
} else {
val tokenIdSize = TokenId.size // optimization: access the value once
cfor(0)(_ < nTokens, _ + 1) { i =>
tokenIds(i) = r.getBytes(tokenIdSize) // READ
tokenAmounts(i) = r.getULong() // READ
}
val amount = r.getULong() // READ
tokenIds(i) = tokenId.asInstanceOf[Digest32]
tokenAmounts(i) = amount
}
val tokens = Colls.pairCollFromArrays(tokenIds, tokenAmounts)

@@ -215,11 +217,13 @@ object ErgoBoxCandidate {
b += ((reg, v)) // don't use `->` since it incurs additional wrapper overhead
}
r.positionLimit = previousPositionLimit
new ErgoBoxCandidate(value, tree, creationHeight, tokens, b.result())
new ErgoBoxCandidate(
value, tree, creationHeight,
tokens.asInstanceOf[Coll[(TokenId, Long)]], b.result())
}

override def parse(r: SigmaByteReader): ErgoBoxCandidate = {
parseBodyWithIndexedDigests(None, r)
parseBodyWithIndexedDigests(digestsInTx = null, r)
}
}

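The parseBodyWithIndexedDigests rewrite above replaces the Option[Coll[TokenId]] parameter with a nullable Array[Array[Byte]] and splits the token loop in two, so the digests check happens once rather than on every iteration. A simplified sketch of that loop split follows, with the reader calls stubbed out as function parameters and a plain while loop standing in for cfor; all names are illustrative, not the real serializer.

// Simplified sketch of the loop split: the null-check on digestsInTx is hoisted
// out of the loop and each case gets its own tight loop over plain arrays.
def readTokenIds(digestsInTx: Array[Array[Byte]],
                 nTokens: Int,
                 readIndex: () => Int,
                 readId: () => Array[Byte]): Array[Array[Byte]] = {
  val tokenIds = new Array[Array[Byte]](nTokens)
  if (digestsInTx != null) {
    // token ids were de-duplicated at the transaction level; read back their indices
    val nDigests = digestsInTx.length
    var i = 0
    while (i < nTokens) {
      val digestIndex = readIndex()
      if (digestIndex < 0 || digestIndex >= nDigests)
        sys.error(s"failed to find token id with index $digestIndex")
      tokenIds(i) = digestsInTx(digestIndex)
      i += 1
    }
  } else {
    // standalone box: each token id is stored inline
    var i = 0
    while (i < nTokens) { tokenIds(i) = readId(); i += 1 }
  }
  tokenIds
}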
Original file line number Diff line number Diff line change
@@ -3,14 +3,20 @@ package org.ergoplatform
import sigmastate.SCollection.SByteArray
import sigmastate.Values._
import sigmastate.eval.IRContext
import sigmastate.interpreter.Interpreter
import sigmastate.interpreter.{Interpreter, PrecompiledScriptProcessor}
import sigmastate.utxo._


/** Base class of verifying interpreter which expects ErgoLikeContext as input of
* verify method.
* It implements deserialization of register of SELF box.
*/
class ErgoLikeInterpreter(implicit val IR: IRContext) extends Interpreter {

override type CTX <: ErgoLikeContext

override val precompiledScriptProcessor: PrecompiledScriptProcessor = PrecompiledScriptProcessor.Default

override def substDeserialize(context: CTX, updateContext: CTX => Unit, node: SValue): Option[SValue] = node match {
case d: DeserializeRegister[_] =>
context.boxesToSpend(context.selfIndex).get(d.reg).flatMap { v =>
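The new precompiledScriptProcessor member of ErgoLikeInterpreter is an overridable val, so a subclass can supply its own processor instead of PrecompiledScriptProcessor.Default. Below is a hypothetical subclass for illustration only, assuming nothing beyond the members visible in this diff; the class and constructor parameter names are mine.

import org.ergoplatform.ErgoLikeInterpreter
import sigmastate.eval.IRContext
import sigmastate.interpreter.PrecompiledScriptProcessor

// Hypothetical subclass: plug in a custom PrecompiledScriptProcessor
// (e.g. one configured differently) instead of the shared Default instance.
class CustomErgoLikeInterpreter(processor: PrecompiledScriptProcessor)
                               (implicit IR: IRContext) extends ErgoLikeInterpreter {
  override val precompiledScriptProcessor: PrecompiledScriptProcessor = processor
}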
Original file line number Diff line number Diff line change
@@ -2,7 +2,7 @@ package org.ergoplatform

import org.ergoplatform.ErgoBox.TokenId
import scorex.crypto.authds.ADKey
import scorex.crypto.hash.{Blake2b256, Digest32}
import scorex.crypto.hash.Blake2b256
import scorex.util._
import sigmastate.SType._
import sigmastate.eval.Extensions._
@@ -13,34 +13,34 @@ import sigmastate.utils.{SigmaByteReader, SigmaByteWriter}
import special.collection.ExtensionMethods._
import spire.syntax.all.cfor

import scala.collection.mutable
import scala.util.Try

trait ErgoBoxReader {
def byId(boxId: ADKey): Try[ErgoBox]
}

/**
* Base trait of a real transaction to be used in Ergo network.
/** Base trait of a real transaction to be used in Ergo network.
* May be in unsigned (`UnsignedErgoLikeTransaction`) or in signed (`ErgoLikeTransaction`) version.
*
* Consists of:
*
* @param inputs - inputs, that will be spent by this transaction.
* @param dataInputs - inputs, that are not going to be spent by transaction, but will be
* reachable from inputs scripts. `dataInputs` scripts will not be executed,
* thus their scripts costs are not included in transaction cost and
* they do not contain spending proofs.
* @param outputCandidates - box candidates to be created by this transaction.
* Differ from ordinary ones in that they do not include transaction id and index
*/
trait ErgoLikeTransactionTemplate[IT <: UnsignedInput] {
/** Inputs, that are not going to be spent by transaction, but will be
* reachable from inputs scripts. `dataInputs` scripts will not be executed,
* thus their scripts costs are not included in transaction cost and
* they do not contain spending proofs.
*/
val dataInputs: IndexedSeq[DataInput]

/** Inputs, that will be spent by this transaction. */
val inputs: IndexedSeq[IT]

/** Box candidates to be created by this transaction.
* Differ from ordinary ones in that they do not include transaction id and index.
*/
val outputCandidates: IndexedSeq[ErgoBoxCandidate]

require(outputCandidates.size <= Short.MaxValue)

/** Identifier of this transaction as state Modifier. */
val id: ModifierId

lazy val outputs: IndexedSeq[ErgoBox] =
@@ -143,34 +143,37 @@ object ErgoLikeTransactionSerializer extends SigmaSerializer[ErgoLikeTransaction
}
}

/** HOTSPOT: don't beautify the code */
override def parse(r: SigmaByteReader): ErgoLikeTransaction = {
// parse transaction inputs
val inputsCount = r.getUShort()
val inputsBuilder = mutable.ArrayBuilder.make[Input]()
for (_ <- 0 until inputsCount) {
inputsBuilder += Input.serializer.parse(r)
val inputs = new Array[Input](inputsCount)
cfor(0)(_ < inputsCount, _ + 1) { i =>
inputs(i) = Input.serializer.parse(r)
}

// parse transaction data inputs
val dataInputsCount = r.getUShort()
val dataInputsBuilder = mutable.ArrayBuilder.make[DataInput]()
for (_ <- 0 until dataInputsCount) {
dataInputsBuilder += DataInput(ADKey @@ r.getBytes(ErgoBox.BoxId.size))
val dataInputs = new Array[DataInput](dataInputsCount)
cfor(0)(_ < dataInputsCount, _ + 1) { i =>
dataInputs(i) = DataInput(ADKey @@ r.getBytes(ErgoBox.BoxId.size))
}

// parse distinct ids of tokens in transaction outputs
val tokensCount = r.getUInt().toInt
val tokensBuilder = mutable.ArrayBuilder.make[TokenId]()
for (_ <- 0 until tokensCount) {
tokensBuilder += Digest32 @@ r.getBytes(TokenId.size)
val tokens = new Array[Array[Byte]](tokensCount)
cfor(0)(_ < tokensCount, _ + 1) { i =>
tokens(i) = r.getBytes(TokenId.size)
}
val tokens = tokensBuilder.result().toColl
// parse outputs

// parse outputs
val outsCount = r.getUShort()
val outputCandidatesBuilder = mutable.ArrayBuilder.make[ErgoBoxCandidate]()
for (_ <- 0 until outsCount) {
outputCandidatesBuilder += ErgoBoxCandidate.serializer.parseBodyWithIndexedDigests(Some(tokens), r)
val outputCandidates = new Array[ErgoBoxCandidate](outsCount)
cfor(0)(_ < outsCount, _ + 1) { i =>
outputCandidates(i) = ErgoBoxCandidate.serializer.parseBodyWithIndexedDigests(tokens, r)
}
new ErgoLikeTransaction(inputsBuilder.result(), dataInputsBuilder.result(), outputCandidatesBuilder.result())

new ErgoLikeTransaction(inputs, dataInputs, outputCandidates)
}

}
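The serializer's parse method above switches from mutable.ArrayBuilder plus for-loops to pre-sized Arrays filled with cfor, which works because each element count is read before its elements. The generic shape of that pattern is sketched below as a helper of my own (not in the codebase), with a while loop in place of cfor.

import scala.reflect.ClassTag

// When the element count is known up front, pre-sizing the Array and filling it
// with an indexed loop avoids ArrayBuilder's growth steps and final copy.
def readN[A: ClassTag](n: Int)(readOne: Int => A): Array[A] = {
  val out = new Array[A](n)
  var i = 0
  while (i < n) {            // same shape as cfor(0)(_ < n, _ + 1)
    out(i) = readOne(i)
    i += 1
  }
  out
}

// Hypothetical usage, mirroring the inputs loop above:
//   val inputs = readN(inputsCount)(_ => Input.serializer.parse(r))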
Original file line number Diff line number Diff line change
@@ -60,7 +60,7 @@ sealed class MapSigmaValidationSettings(private val map: Map[Short, (ValidationR
override def iterator: Iterator[(Short, (ValidationRule, RuleStatus))] = map.iterator
override def get(id: Short): Option[(ValidationRule, RuleStatus)] = map.get(id)

/** @hotspot don't beautify this code */
/** HOTSPOT: don't beautify this code */
override def getStatus(id: Short): Option[RuleStatus] = {
val statusOpt = map.get(id)
val res = if (statusOpt.isDefined) Some(statusOpt.get._2) else None
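getStatus above unwraps the Option by hand instead of calling statusOpt.map(_._2), keeping the hot path free of the extra function indirection, though the Some for the result is still allocated. The same shape as a tiny standalone helper, hypothetical and not from the codebase:

// Manual Option unwrap in the style of getStatus: isDefined/get instead of map.
def secondOf[A, B](opt: Option[(A, B)]): Option[B] =
  if (opt.isDefined) Some(opt.get._2) else None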
Original file line number Diff line number Diff line change
@@ -33,7 +33,7 @@ case class ValidationRule(
/** Check the rule is registered and enabled.
* Since it is easy to forget to register new rule, we need to do this check.
* But because it is hotspot, we do this check only once for each rule.
* @hotspot executed on every typeCode and opCode during script deserialization
* HOTSPOT: executed on every typeCode and opCode during script deserialization
*/
@inline protected final def checkRule(): Unit = {
if (!_checked) {
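checkRule above performs the registration check only on the first call and afterwards just reads the _checked flag, which matters because it runs for every typeCode and opCode during script deserialization. A hedged sketch of that check-once pattern, with the actual validation-settings lookup elided behind an abstract doCheck:

// Check-once pattern: the first call pays for the expensive check, later calls
// only read a boolean field.
abstract class CheckedOnce {
  private var _checked: Boolean = false

  /** The expensive check, performed at most once per instance. */
  protected def doCheck(): Unit

  @inline protected final def checkRule(): Unit = {
    if (!_checked) {
      doCheck()
      _checked = true
    }
  }
}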